Compare commits


2 Commits

Author SHA1 Message Date
d90d77f034 Update karakeep.json 2025-07-04 13:33:28 +02:00
c9047a3d86 karakeep: increase disk space 2025-07-04 13:32:25 +02:00
99 changed files with 1966 additions and 2491 deletions

.github/autolabeler-config.json

@ -4,7 +4,6 @@
"fileStatus": "added",
"includeGlobs": [
"ct/**",
"tools/**",
"install/**",
"misc/**",
"turnkey/**",
@ -18,13 +17,16 @@
"fileStatus": "modified",
"includeGlobs": [
"ct/**",
"tools/**",
"install/**",
"misc/**",
"turnkey/**",
"vm/**"
],
"excludeGlobs": []
"excludeGlobs": [
"misc/build.func",
"misc/install.func",
"misc/api.func"
]
}
],
"delete script": [
@ -32,7 +34,6 @@
"fileStatus": "removed",
"includeGlobs": [
"ct/**",
"tools/**",
"install/**",
"misc/**",
"turnkey/**",
@ -45,7 +46,11 @@
{
"fileStatus": null,
"includeGlobs": [
"*.md"
"*.md",
".github/**",
"misc/*.func",
"misc/create_lxc.sh",
"api/**"
],
"excludeGlobs": []
}
@ -57,9 +62,7 @@
"misc/*.func",
"misc/create_lxc.sh"
],
"excludeGlobs": [
"misc/api.func"
]
"excludeGlobs": []
}
],
"website": [
@ -68,9 +71,7 @@
"includeGlobs": [
"frontend/**"
],
"excludeGlobs": [
"frontend/public/json/**"
]
"excludeGlobs": []
}
],
"api": [
@ -101,29 +102,22 @@
"excludeGlobs": []
}
],
"addon": [
"high risk": [
{
"fileStatus": null,
"includeGlobs": [
"tools/addon/**"
"misc/build.func",
"misc/install.func",
"misc/create_lxc.sh"
],
"excludeGlobs": []
}
],
"pve-tool": [
"documentation": [
{
"fileStatus": null,
"includeGlobs": [
"tools/pve/**"
],
"excludeGlobs": []
}
],
"vm": [
{
"fileStatus": null,
"includeGlobs": [
"vm/**"
"*.md"
],
"excludeGlobs": []
}
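
For context, a minimal sketch of how one of these label rules is evaluated against a changed file, mirroring the minimatch-based check in .github/workflows/autolabeler.yml further down in this compare. The rule and filenames below are illustrative samples, not taken from a real PR.

// Requires: npm install minimatch (the named export exists in current minimatch releases)
const { minimatch } = require("minimatch");

// Sample "update script" rule, shaped like the config above
const rule = {
  fileStatus: "modified",
  includeGlobs: ["ct/**", "install/**", "misc/**", "turnkey/**", "vm/**"],
  excludeGlobs: ["misc/build.func", "misc/install.func", "misc/api.func"],
};

// A file triggers the label when its status matches (if a status is set),
// at least one includeGlob matches, and no excludeGlob matches.
function ruleMatches(rule, prFile) {
  const statusOk = rule.fileStatus ? rule.fileStatus === prFile.status : true;
  const included = rule.includeGlobs.some((glob) => minimatch(prFile.filename, glob));
  const excluded = rule.excludeGlobs.some((glob) => minimatch(prFile.filename, glob));
  return statusOk && included && !excluded;
}

console.log(ruleMatches(rule, { filename: "ct/authelia.sh", status: "modified" })); // true
console.log(ruleMatches(rule, { filename: "misc/build.func", status: "modified" })); // false (excluded)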

.github/changelog-pr-config.json

@ -1,148 +1,112 @@
[
{
"title": "🆕 New Scripts",
"labels": [
"new script"
]
"labels": ["new script"]
},
{
"title": "🚀 Updated Scripts",
"labels": [
"update script"
],
"labels": ["update script"],
"subCategories": [
{
"title": "🐞 Bug Fixes",
"labels": [
"bugfix"
],
"notes": []
"labels": ["bugfix"],
"notes" : []
},
{
"title": "✨ New Features",
"labels": [
"feature"
],
"notes": []
"labels": ["feature"],
"notes" : []
},
{
"title": "💥 Breaking Changes",
"labels": [
"breaking change"
],
"notes": []
"labels": ["breaking change"],
"notes" : []
},
{
"title": "🔧 Refactor",
"labels": [
"refactor"
],
"notes": []
"labels": ["refactor"],
"notes" : []
}
]
},
{
"title": "🧰 Maintenance",
"labels": [
"maintenance"
],
"labels": ["maintenance"],
"subCategories": [
{
"title": "🐞 Bug Fixes",
"labels": [
"bugfix"
],
"notes": []
"labels": ["bugfix"],
"notes" : []
},
{
"title": "✨ New Features",
"labels": [
"feature"
],
"notes": []
"labels": ["feature"],
"notes" : []
},
{
"title": "💥 Breaking Changes",
"labels": [
"breaking change"
],
"notes": []
"labels": ["breaking change"],
"notes" : []
},
{
"title": "📡 API",
"labels": [
"api"
],
"notes": []
"labels": ["api"],
"notes" : []
},
{
"title": "💾 Core",
"labels": [
"core"
],
"notes": []
"labels": ["core"],
"notes" : []
},
{
"title": "📂 Github",
"labels": [
"github"
],
"notes": []
"labels": ["github"],
"notes" : []
},
{
"title": "📝 Documentation",
"labels": [
"maintenance"
],
"notes": []
"title" :"📝 Documentation",
"labels": ["documentation"],
"notes" : []
},
{
"title": "🔧 Refactor",
"labels": [
"refactor"
],
"notes": []
"title" :"🔧 Refactor",
"labels": ["refactor"],
"notes" : []
}
]
},
{
"title": "🌐 Website",
"labels": [
"website"
],
"labels": ["website"],
"subCategories": [
{
"title": "🐞 Bug Fixes",
"labels": [
"bugfix"
],
"notes": []
"labels": ["bugfix"],
"notes" : []
},
{
"title": "✨ New Features",
"labels": [
"feature"
],
"notes": []
"labels": ["feature"],
"notes" : []
},
{
"title": "💥 Breaking Changes",
"labels": [
"breaking change"
],
"notes": []
"labels": ["breaking change"],
"notes" : []
},
{
"title": "📝 Script Information",
"labels": [
"json"
],
"notes": []
"labels": ["json"],
"notes" : []
}
]
},
{
"title": "❔ Unlabelled",
"labels": []
},
{
"title": "💥 Breaking Changes",
"labels": ["breaking change"]
}
]
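
A rough sketch of how a configuration like this can be used to group merged PRs for the changelog, under the assumption that a PR lands in every top-level section whose labels it carries and that the empty-label entry (❔ Unlabelled) acts as the fallback; subCategories are ignored for brevity. This illustrates the config shape, not the exact logic of changelog-pr.yml.

// Illustrative only: assumes the grouping semantics described above.
const config = require("./.github/changelog-pr-config.json"); // path relative to the repo root

function sectionsFor(prLabels, config) {
  const matched = config.filter((section) =>
    section.labels.some((label) => prLabels.includes(label))
  );
  // Fall back to the entries with an empty label list (❔ Unlabelled) when nothing matched.
  return matched.length ? matched : config.filter((section) => section.labels.length === 0);
}

console.log(sectionsFor(["update script", "bugfix"], config).map((s) => s.title));
// e.g. [ "🚀 Updated Scripts" ]
console.log(sectionsFor(["something-else"], config).map((s) => s.title));
// e.g. [ "❔ Unlabelled" ]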

.github/workflows/autolabeler.yml

@ -1,7 +1,6 @@
name: Auto Label Pull Requests
on:
workflow_dispatch:
pull_request_target:
branches: ["main"]
types: [opened, synchronize, reopened, edited]
@ -20,7 +19,7 @@ jobs:
- name: Install dependencies
run: npm install minimatch
- name: Label PR based on file changes and PR template
uses: actions/github-script@v7
with:
@ -34,7 +33,7 @@ jobs:
const autolabelerConfig = JSON.parse(fileContent);
const prNumber = context.payload.pull_request.number;
const prBody = context.payload.pull_request.body || "";
const prBody = context.payload.pull_request.body.toLowerCase();
let labelsToAdd = new Set();
@ -44,67 +43,51 @@ jobs:
pull_number: prNumber,
});
const prFiles = prListFilesResponse.data;
// Apply labels based on file changes
for (const [label, rules] of Object.entries(autolabelerConfig)) {
const shouldAddLabel = prFiles.some((prFile) => {
return rules.some((rule) => {
const isFileStatusMatch = rule.fileStatus ? rule.fileStatus === prFile.status : true;
const isIncludeGlobMatch = rule.includeGlobs.some((glob) => minimatch(prFile.filename, glob));
const isExcludeGlobMatch = rule.excludeGlobs.some((glob) => minimatch(prFile.filename, glob));
return isFileStatusMatch && isIncludeGlobMatch && !isExcludeGlobMatch;
});
});
if (shouldAddLabel) {
labelsToAdd.add(label);
if (label === "update script") {
for (const prFile of prFiles) {
const filename = prFile.filename;
if (filename.startsWith("vm/")) labelsToAdd.add("vm");
if (filename.startsWith("tools/addon/")) labelsToAdd.add("addon");
if (filename.startsWith("tools/pve/")) labelsToAdd.add("pve-tool");
}
}
}
}
if (labelsToAdd.size < 2) {
const templateLabelMappings = {
//if two labels or more are added, return
if (labelsToAdd.size < 2) {
const templateLabelMappings = {
"🐞 **Bug fix**": "bugfix",
"✨ **New feature**": "feature",
"💥 **Breaking change**": "breaking change",
"🆕 **New script**": "new script",
"🌍 **Website update**": "website", // handled special
"🔧 **Refactoring / Code Cleanup**": "refactor",
"📝 **Documentation update**": "documentation" // mapped to maintenance
};
for (const [checkbox, label] of Object.entries(templateLabelMappings)) {
const escapedCheckbox = checkbox.replace(/([.*+?^=!:${}()|[\]\/\\])/g, "\\$1");
const regex = new RegExp(`- \\[(x|X)\\]\\s*${escapedCheckbox}`, "i");
if (regex.test(prBody)) {
if (label === "website") {
const hasJson = prFiles.some((f) => f.filename.startsWith("frontend/public/json/"));
const hasUpdateScript = labelsToAdd.has("update script");
const hasContentLabel = ["bugfix", "feature", "refactor"].some((l) => labelsToAdd.has(l));
if (!(hasUpdateScript && hasContentLabel)) {
labelsToAdd.add(hasJson ? "json" : "website");
}
} else if (label === "documentation") {
labelsToAdd.add("maintenance");
} else {
labelsToAdd.add(label);
}
const escapedCheckbox = checkbox.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, "\\$1");
const regex = new RegExp(`- \\[(x|X)\\]\\s*.*${escapedCheckbox}`, "i");
const match = prBody.match(regex);
if (match) {
console.log(`Match: ${match}`);
labelsToAdd.add(label);
}
}
}
if (labelsToAdd.size === 0) {
labelsToAdd.add("needs triage");
}
console.log(`Labels to add: ${Array.from(labelsToAdd).join(", ")}`);
if (labelsToAdd.size > 0) {
console.log(`Adding labels ${Array.from(labelsToAdd).join(", ")} to PR ${prNumber}`);
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
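
Pulled out of the workflow excerpt above for readability: a standalone sketch of the checkbox-to-label fallback that the new version applies to the PR body when fewer than two labels were derived from file changes. The mapping below is a subset of the table in the workflow; the sample PR bodies are illustrative.

// Subset of templateLabelMappings from the workflow above
const templateLabelMappings = {
  "🐞 **Bug fix**": "bugfix",
  "✨ **New feature**": "feature",
  "💥 **Breaking change**": "breaking change",
  "🆕 **New script**": "new script",
};

function labelsFromTemplate(prBody) {
  const labels = new Set();
  for (const [checkbox, label] of Object.entries(templateLabelMappings)) {
    // Escape regex metacharacters in the checkbox text, then look for a ticked checkbox
    const escaped = checkbox.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, "\\$1");
    const regex = new RegExp(`- \\[(x|X)\\]\\s*.*${escaped}`, "i");
    if (regex.test(prBody)) labels.add(label);
  }
  return [...labels];
}

console.log(labelsFromTemplate("- [x] ✨ **New feature**")); // [ "feature" ]
console.log(labelsFromTemplate("- [ ] 🐞 **Bug fix**")); // []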

.github/workflows/changelog-pr.yml

@ -80,7 +80,7 @@ jobs:
{ title: "💥 Breaking Changes", labels: ["breaking change"], notes: [] },
{ title: "📡 API", labels: ["api"], notes: [] },
{ title: "Github", labels: ["github"], notes: [] },
{ title: "📝 Documentation", labels: ["maintenance"], notes: [] },
{ title: "📝 Documentation", labels: ["documentation"], notes: [] },
{ title: "🔧 Refactor", labels: ["refactor"], notes: [] }
] :
obj.labels.includes("website") ? [


@ -10,162 +10,9 @@
> [!CAUTION]
Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit the project's popularity for potentially malicious purposes.
## 2025-07-12
> [!NOTE]
All LXC instances created using this repository come pre-installed with Midnight Commander, which is a command-line tool (`mc`) that offers a user-friendly file and directory management interface for the terminal environment.
## 2025-07-11
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- immich: hotfix #5921 [@vhsdream](https://github.com/vhsdream) ([#5938](https://github.com/community-scripts/ProxmoxVE/pull/5938))
- bookstack: add setup_composer in update [@MickLesk](https://github.com/MickLesk) ([#5935](https://github.com/community-scripts/ProxmoxVE/pull/5935))
- Quickfix: Immich: revert install sequence [@vhsdream](https://github.com/vhsdream) ([#5932](https://github.com/community-scripts/ProxmoxVE/pull/5932))
- #### ✨ New Features
- Refactor & Function Bump: Docker [@MickLesk](https://github.com/MickLesk) ([#5889](https://github.com/community-scripts/ProxmoxVE/pull/5889))
- #### 🔧 Refactor
- Immich: handle custom library dependency updates; other fixes [@vhsdream](https://github.com/vhsdream) ([#5896](https://github.com/community-scripts/ProxmoxVE/pull/5896))
## 2025-07-10
### 🚀 Updated Scripts
- Refactor: Habitica [@MickLesk](https://github.com/MickLesk) ([#5911](https://github.com/community-scripts/ProxmoxVE/pull/5911))
- #### 🐞 Bug Fixes
- core: fix breaking re-download of lxc containers [@MickLesk](https://github.com/MickLesk) ([#5906](https://github.com/community-scripts/ProxmoxVE/pull/5906))
- PLANKA: Fix paths to application directory [@tremor021](https://github.com/tremor021) ([#5900](https://github.com/community-scripts/ProxmoxVE/pull/5900))
- #### 🔧 Refactor
- Refactor: EMQX + Update-Function + Improved NodeJS Crawling [@MickLesk](https://github.com/MickLesk) ([#5907](https://github.com/community-scripts/ProxmoxVE/pull/5907))
## 2025-07-09
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- Omada Update: add missing exit [@MickLesk](https://github.com/MickLesk) ([#5894](https://github.com/community-scripts/ProxmoxVE/pull/5894))
- FreshRSS: fix needed php modules [@MickLesk](https://github.com/MickLesk) ([#5886](https://github.com/community-scripts/ProxmoxVE/pull/5886))
- core: Fix VAAPI passthrough for unprivileged LXC containers via devX [@MickLesk](https://github.com/MickLesk) ([#5875](https://github.com/community-scripts/ProxmoxVE/pull/5875))
- tools.func: fix an bug while php libapache2-mod breaks [@MickLesk](https://github.com/MickLesk) ([#5857](https://github.com/community-scripts/ProxmoxVE/pull/5857))
- BabyBuddy: fix path issues for update [@MickLesk](https://github.com/MickLesk) ([#5856](https://github.com/community-scripts/ProxmoxVE/pull/5856))
- #### ✨ New Features
- tools.func: strip leading folders for prebuild assets [@MickLesk](https://github.com/MickLesk) ([#5865](https://github.com/community-scripts/ProxmoxVE/pull/5865))
- #### 💥 Breaking Changes
- Refactor: Stirling-PDF [@MickLesk](https://github.com/MickLesk) ([#5872](https://github.com/community-scripts/ProxmoxVE/pull/5872))
- #### 🔧 Refactor
- Refactor: EMQX [@tremor021](https://github.com/tremor021) ([#5840](https://github.com/community-scripts/ProxmoxVE/pull/5840))
- Refactor: Excalidraw [@tremor021](https://github.com/tremor021) ([#5841](https://github.com/community-scripts/ProxmoxVE/pull/5841))
- Refactor: Firefly [@tremor021](https://github.com/tremor021) ([#5844](https://github.com/community-scripts/ProxmoxVE/pull/5844))
- Refactor: gatus [@tremor021](https://github.com/tremor021) ([#5849](https://github.com/community-scripts/ProxmoxVE/pull/5849))
- Refactor: FreshRSS [@tremor021](https://github.com/tremor021) ([#5847](https://github.com/community-scripts/ProxmoxVE/pull/5847))
- Refactor: Fluid-Calendar [@tremor021](https://github.com/tremor021) ([#5846](https://github.com/community-scripts/ProxmoxVE/pull/5846))
- Refactor: Commafeed [@tremor021](https://github.com/tremor021) ([#5802](https://github.com/community-scripts/ProxmoxVE/pull/5802))
- Refactor: FlareSolverr [@tremor021](https://github.com/tremor021) ([#5845](https://github.com/community-scripts/ProxmoxVE/pull/5845))
- Refactor: Glance [@tremor021](https://github.com/tremor021) ([#5874](https://github.com/community-scripts/ProxmoxVE/pull/5874))
- Refactor: Gitea [@tremor021](https://github.com/tremor021) ([#5876](https://github.com/community-scripts/ProxmoxVE/pull/5876))
- Refactor: Ghost (use now MySQL) [@MickLesk](https://github.com/MickLesk) ([#5871](https://github.com/community-scripts/ProxmoxVE/pull/5871))
### 🧰 Maintenance
- #### 📂 Github
- Github: AutoLabler | ChangeLog (Refactor) [@MickLesk](https://github.com/MickLesk) ([#5868](https://github.com/community-scripts/ProxmoxVE/pull/5868))
## 2025-07-08
### 🚀 Updated Scripts
- Refactor: Emby [@tremor021](https://github.com/tremor021) ([#5839](https://github.com/community-scripts/ProxmoxVE/pull/5839))
- #### 🐞 Bug Fixes
- Ollama: fix update script [@lucacome](https://github.com/lucacome) ([#5819](https://github.com/community-scripts/ProxmoxVE/pull/5819))
- #### ✨ New Features
- tools.func: add ffmpeg + minor improvement [@MickLesk](https://github.com/MickLesk) ([#5834](https://github.com/community-scripts/ProxmoxVE/pull/5834))
- #### 🔧 Refactor
- Refactor: ErsatzTV [@MickLesk](https://github.com/MickLesk) ([#5835](https://github.com/community-scripts/ProxmoxVE/pull/5835))
## 2025-07-07
### 🚀 Updated Scripts
- #### 🐞 Bug Fixes
- Fix/stirling pdf script [@JcMinarro](https://github.com/JcMinarro) ([#5803](https://github.com/community-scripts/ProxmoxVE/pull/5803))
- gitea-mirror: update repo-url [@CrazyWolf13](https://github.com/CrazyWolf13) ([#5794](https://github.com/community-scripts/ProxmoxVE/pull/5794))
- Fix unbound var in pulse.sh [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#5807](https://github.com/community-scripts/ProxmoxVE/pull/5807))
- Bookstack: Fix PHP Issue & Bump to PHP 8.3 [@MickLesk](https://github.com/MickLesk) ([#5779](https://github.com/community-scripts/ProxmoxVE/pull/5779))
- #### ✨ New Features
- Refactor: Threadfin (+ updatable) [@MickLesk](https://github.com/MickLesk) ([#5783](https://github.com/community-scripts/ProxmoxVE/pull/5783))
- tools.func: better handling when unpacking tarfiles in prebuild mode [@MickLesk](https://github.com/MickLesk) ([#5781](https://github.com/community-scripts/ProxmoxVE/pull/5781))
- tools.func: add AVX check for MongoDB [@MickLesk](https://github.com/MickLesk) ([#5780](https://github.com/community-scripts/ProxmoxVE/pull/5780))
- #### 🔧 Refactor
- Refactor: Docmost [@tremor021](https://github.com/tremor021) ([#5806](https://github.com/community-scripts/ProxmoxVE/pull/5806))
- Refactor: Baby Buddy [@tremor021](https://github.com/tremor021) ([#5769](https://github.com/community-scripts/ProxmoxVE/pull/5769))
- Refactor: Changed the way we install BunkerWeb by leveraging the brand new install-bunkerweb.sh [@TheophileDiot](https://github.com/TheophileDiot) ([#5707](https://github.com/community-scripts/ProxmoxVE/pull/5707))
### 🌐 Website
- #### 📝 Script Information
- PBS: add hint for advanced installs [@MickLesk](https://github.com/MickLesk) ([#5788](https://github.com/community-scripts/ProxmoxVE/pull/5788))
- EMQX: Add warning to website [@tremor021](https://github.com/tremor021) ([#5770](https://github.com/community-scripts/ProxmoxVE/pull/5770))
## 2025-07-06
### 🚀 Updated Scripts
- Refactor: Barcodebuddy [@tremor021](https://github.com/tremor021) ([#5735](https://github.com/community-scripts/ProxmoxVE/pull/5735))
- #### 🐞 Bug Fixes
- Fix update script for Mafl: ensure directory is removed recursively [@jonalbr](https://github.com/jonalbr) ([#5759](https://github.com/community-scripts/ProxmoxVE/pull/5759))
- BookStack: Typo fix [@tremor021](https://github.com/tremor021) ([#5746](https://github.com/community-scripts/ProxmoxVE/pull/5746))
- Resolves incorrect URL at end of Pocket ID script [@johnsturgeon](https://github.com/johnsturgeon) ([#5743](https://github.com/community-scripts/ProxmoxVE/pull/5743))
- #### ✨ New Features
- [Feature] Add option to expose Docker via TCP port (alpine docker) [@oformaniuk](https://github.com/oformaniuk) ([#5716](https://github.com/community-scripts/ProxmoxVE/pull/5716))
- #### 🔧 Refactor
- Refactor: Bitmagnet [@tremor021](https://github.com/tremor021) ([#5733](https://github.com/community-scripts/ProxmoxVE/pull/5733))
- Refactor: Baikal [@tremor021](https://github.com/tremor021) ([#5736](https://github.com/community-scripts/ProxmoxVE/pull/5736))
## 2025-07-05
### 🚀 Updated Scripts
- #### 🔧 Refactor
- Refactor: BookStack [@tremor021](https://github.com/tremor021) ([#5732](https://github.com/community-scripts/ProxmoxVE/pull/5732))
- Refactor: Authelia [@tremor021](https://github.com/tremor021) ([#5722](https://github.com/community-scripts/ProxmoxVE/pull/5722))
- Refactor: Dashy [@tremor021](https://github.com/tremor021) ([#5723](https://github.com/community-scripts/ProxmoxVE/pull/5723))
- Refactor: CryptPad [@tremor021](https://github.com/tremor021) ([#5724](https://github.com/community-scripts/ProxmoxVE/pull/5724))
- Refactor: ByteStash [@tremor021](https://github.com/tremor021) ([#5725](https://github.com/community-scripts/ProxmoxVE/pull/5725))
- Refactor: AgentDVR [@tremor021](https://github.com/tremor021) ([#5726](https://github.com/community-scripts/ProxmoxVE/pull/5726))
## 2025-07-04
@ -173,19 +20,11 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
- #### 🐞 Bug Fixes
- Refactor: Mafl [@tremor021](https://github.com/tremor021) ([#5702](https://github.com/community-scripts/ProxmoxVE/pull/5702))
- Outline: Fix sed command for v0.85.0 [@tremor021](https://github.com/tremor021) ([#5688](https://github.com/community-scripts/ProxmoxVE/pull/5688))
- Komodo: Update Script to use FerretDB / remove psql & sqlite options [@MickLesk](https://github.com/MickLesk) ([#5690](https://github.com/community-scripts/ProxmoxVE/pull/5690))
- ESPHome: Fix Linking issue to prevent version mismatch [@MickLesk](https://github.com/MickLesk) ([#5685](https://github.com/community-scripts/ProxmoxVE/pull/5685))
- Cloudflare-DDNS: fix unvisible read command at install [@MickLesk](https://github.com/MickLesk) ([#5682](https://github.com/community-scripts/ProxmoxVE/pull/5682))
- #### ✨ New Features
- Core layer refactor: centralized error traps and msg_* consistency [@MickLesk](https://github.com/MickLesk) ([#5705](https://github.com/community-scripts/ProxmoxVE/pull/5705))
- #### 💥 Breaking Changes
- Update Iptag [@DesertGamer](https://github.com/DesertGamer) ([#5677](https://github.com/community-scripts/ProxmoxVE/pull/5677))
- Cloudflare-DDNS: fix unvisible read command at install [@MickLesk](https://github.com/MickLesk) ([#5682](https://github.com/community-scripts/ProxmoxVE/pull/5682))
### 🌐 Website


@ -1,7 +1,7 @@
<div align="center">
<p align="center">
<a href="#">
<img src="https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/images/logo-81x112.png" height="100px" />
<img src="https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/images/logo.png" height="100px" />
</a>
</p>
</div>


@ -22,30 +22,30 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d "/etc/authelia/" ]]; then
msg_error "No ${APP} Installation Found!"
header_info
check_container_storage
check_container_resources
if [[ ! -d "/etc/authelia/" ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ "${RELEASE}" != "$(/usr/bin/authelia -v | awk '{print substr($3, 2, length($2)) }')" ]]; then
msg_info "Updating $APP to ${RELEASE}"
$STD apt-get update
$STD apt-get -y upgrade
curl -fsSL "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_amd64.deb" -o $(basename "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_amd64.deb")
$STD dpkg -i "authelia_${RELEASE}_amd64.deb"
msg_info "Cleaning Up"
rm -f "authelia_${RELEASE}_amd64.deb"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleanup Completed"
msg_ok "Updated $APP to ${RELEASE}"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ "${RELEASE}" != "$(/usr/bin/authelia -v | awk '{print substr($3, 2, length($2)) }')" ]]; then
$STD apt-get update
$STD apt-get -y upgrade
fetch_and_deploy_gh_release "authelia" "authelia/authelia" "binary"
msg_info "Cleaning Up"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleanup Completed"
msg_ok "Updated $APP to ${RELEASE}"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
}
start


@ -29,7 +29,7 @@ function update_script() {
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/babybuddy/babybuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.babybuddy 2>/dev/null)" ]] || [[ ! -f ~/.babybuddy ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/babybuddy_version.txt)" ]]; then
setup_uv
msg_info "Stopping Services"
@ -38,18 +38,21 @@ function update_script() {
msg_ok "Services Stopped"
msg_info "Cleaning old files"
cp /opt/babybuddy/babybuddy/settings/production.py /tmp/production.py.bak
cp babybuddy/settings/production.py /tmp/production.py.bak
find . -mindepth 1 -maxdepth 1 ! -name '.venv' -exec rm -rf {} +
msg_ok "Cleaned old files"
fetch_and_deploy_gh_release "babybuddy" "babybuddy/babybuddy"
msg_info "Updating ${APP} to v${RELEASE}"
temp_file=$(mktemp)
curl -fsSL "https://github.com/babybuddy/babybuddy/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
cd /opt/babybuddy
tar zxf "$temp_file" --strip-components=1 -C /opt/babybuddy
mv /tmp/production.py.bak babybuddy/settings/production.py
cd /opt/babybuddy
mv /tmp/production.py.bak /opt/babybuddy/babybuddy/settings/production.py
source .venv/bin/activate
$STD uv pip install -r requirements.txt
$STD python manage.py migrate
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to v${RELEASE}"
msg_info "Fixing permissions"
@ -63,6 +66,9 @@ function update_script() {
systemctl start nginx
msg_ok "Services Started"
msg_info "Cleaning up"
rm -f "$temp_file"
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"


@ -23,35 +23,34 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/baikal ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/sabre-io/Baikal/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ "${RELEASE}" != "$(cat ~/.baikal 2>/dev/null)" ]] || [[ ! -f ~/.baikal ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Service"
systemctl stop apache2
msg_ok "Stopped Service"
msg_info "Backing up data"
msg_info "Updating ${APP} to v${RELEASE}"
cd /opt
curl -fsSL "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip" -o $(basename "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip")
mv /opt/baikal /opt/baikal-backup
msg_ok "Backed up data"
fetch_and_deploy_gh_release "baikal" "sabre-io/Baikal"
msg_info "Configuring Baikal"
$STD unzip -o "baikal-${RELEASE}.zip"
cp -r /opt/baikal-backup/config/baikal.yaml /opt/baikal/config/
cp -r /opt/baikal-backup/Specific/ /opt/baikal/
chown -R www-data:www-data /opt/baikal/
chmod -R 755 /opt/baikal/
msg_ok "Configured Baikal"
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting Service"
systemctl start apache2
msg_ok "Started Service"
msg_info "Cleaning up"
rm -rf "/opt/baikal-${RELEASE}.zip"
rm -rf /opt/baikal-backup
msg_ok "Cleaned"
msg_ok "Updated Successfully"


@ -23,28 +23,27 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/barcodebuddy ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/Forceu/barcodebuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.barcodebuddy 2>/dev/null)" ]] || [[ ! -f ~/.barcodebuddy ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Service"
systemctl stop apache2
systemctl stop barcodebuddy
msg_ok "Stopped Service"
msg_info "Backing up data"
msg_info "Updating ${APP} to v${RELEASE}"
cd /opt
mv /opt/barcodebuddy/ /opt/barcodebuddy-backup
msg_ok "Backed up data"
fetch_and_deploy_gh_release "barcodebuddy" "Forceu/barcodebuddy"
msg_info "Configuring ${APP}"
curl -fsSL "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip" -o $(basename "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip")
$STD unzip "v${RELEASE}.zip"
mv "/opt/barcodebuddy-${RELEASE}" /opt/barcodebuddy
cp -r /opt/barcodebuddy-backup/data/. /opt/barcodebuddy/data
chown -R www-data:www-data /opt/barcodebuddy/data
msg_ok "Configured ${APP}"
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting Service"
systemctl start apache2
@ -52,6 +51,7 @@ function update_script() {
msg_ok "Started Service"
msg_info "Cleaning up"
rm -r "/opt/v${RELEASE}.zip"
rm -r /opt/barcodebuddy-backup
msg_ok "Cleaned"
msg_ok "Updated Successfully"


@ -28,12 +28,12 @@ function update_script() {
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/bitmagnet-io/bitmagnet/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.bitmagnet 2>/dev/null)" ]] || [[ ! -f ~/.bitmagnet ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Service"
systemctl stop bitmagnet-web
msg_ok "Stopped Service"
msg_info "Backing up data"
msg_info "Backing up database"
rm -f /tmp/backup.sql
$STD sudo -u postgres pg_dump \
--column-inserts \
@ -56,26 +56,31 @@ function update_script() {
bitmagnet \
>/tmp/backup.sql
mv /tmp/backup.sql /opt/
[ -f /opt/bitmagnet/.env ] && cp /opt/bitmagnet/.env /opt/
[ -f /opt/bitmagnet/config.yml ] && cp /opt/bitmagnet/config.yml /opt/
msg_ok "Data backed up"
rm -rf /opt/bitmagnet
fetch_and_deploy_gh_release "bitmagnet" "bitmagnet-io/bitmagnet"
msg_ok "Database backed up"
msg_info "Updating ${APP} to v${RELEASE}"
[ -f /opt/bitmagnet/.env ] && cp /opt/bitmagnet/.env /opt/
[ -f /opt/bitmagnet/config.yml ] && cp /opt/bitmagnet/config.yml /opt/
rm -rf /opt/bitmagnet/*
temp_file=$(mktemp)
curl -fsSL "https://github.com/bitmagnet-io/bitmagnet/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf "$temp_file" --strip-components=1 -C /opt/bitmagnet
cd /opt/bitmagnet
VREL=v$RELEASE
$STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
chmod +x bitmagnet
[ -f "/opt/.env" ] && cp "/opt/.env" /opt/bitmagnet/
[ -f "/opt/config.yml" ] && cp "/opt/config.yml" /opt/bitmagnet/
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting Service"
systemctl start bitmagnet-web
msg_ok "Started Service"
msg_info "Cleaning up"
rm -f "$temp_file"
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"


@ -23,33 +23,25 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/bookstack ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/BookStackApp/BookStack/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.bookstack 2>/dev/null)" ]] || [[ ! -f ~/.bookstack ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Apache2"
systemctl stop apache2
msg_ok "Services Stopped"
msg_info "Backing up data"
msg_info "Updating ${APP} to v${RELEASE}"
mv /opt/bookstack /opt/bookstack-backup
msg_ok "Backup finished"
fetch_and_deploy_gh_release "bookstack" "BookStackApp/BookStack"
PHP_MODULE="ldap,tidy,bz2,mysqli" PHP_FPM="YES" PHP_APACHE="YES" PHP_VERSION="8.3" setup_php
setup_composer
msg_info "Restoring backup"
curl -fsSL "https://github.com/BookStackApp/BookStack/archive/refs/tags/v${RELEASE}.zip" -o "/opt/BookStack-${RELEASE}.zip"
$STD unzip "/opt/BookStack-${RELEASE}.zip" -d /opt
mv "/opt/BookStack-${RELEASE}" /opt/bookstack
cp /opt/bookstack-backup/.env /opt/bookstack/.env
[[ -d /opt/bookstack-backup/public/uploads ]] && cp -a /opt/bookstack-backup/public/uploads/. /opt/bookstack/public/uploads/
[[ -d /opt/bookstack-backup/storage/uploads ]] && cp -a /opt/bookstack-backup/storage/uploads/. /opt/bookstack/storage/uploads/
[[ -d /opt/bookstack-backup/themes ]] && cp -a /opt/bookstack-backup/themes/. /opt/bookstack/themes/
msg_ok "Backup restored"
msg_info "Configuring BookStack"
cd /opt/bookstack
export COMPOSER_ALLOW_SUPERUSER=1
$STD composer install --no-dev
@ -59,7 +51,7 @@ function update_script() {
chmod -R 775 /opt/bookstack/storage /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads
chmod -R 640 /opt/bookstack/.env
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Configured BookStack"
msg_ok "Updated ${APP} to v${RELEASE}"
msg_info "Starting Apache2"
systemctl start apache2
@ -67,6 +59,7 @@ function update_script() {
msg_info "Cleaning Up"
rm -rf /opt/bookstack-backup
rm -rf "/opt/BookStack-${RELEASE}.zip"
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else


@ -37,8 +37,8 @@ Pin: version ${RELEASE}
Pin-Priority: 1001
EOF
apt-get update
apt-mark unhold bunkerweb nginx
apt-get install -y --allow-downgrades bunkerweb=${RELEASE}
apt-get install -y nginx=1.26.3*
apt-get install -y bunkerweb=${RELEASE}
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to ${RELEASE}"


@ -20,47 +20,47 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/bytestash ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.bytestash 2>/dev/null)" ]] || [[ ! -f ~/.bytestash ]]; then
read -rp "${TAB3}Did you make a backup via application WebUI? (y/n): " backuped
if [[ "$backuped" =~ ^[Yy]$ ]]; then
msg_info "Stopping Services"
systemctl stop bytestash-backend
systemctl stop bytestash-frontend
msg_ok "Services Stopped"
rm -rf /opt/bytestash
fetch_and_deploy_gh_release "bytestash" "jordan-dalby/ByteStash"
msg_info "Configuring ByteStash"
cd /opt/bytestash/server
$STD npm install
cd /opt/bytestash/client
$STD npm install
msg_ok "Updated ${APP}"
msg_info "Starting Services"
systemctl start bytestash-backend
systemctl start bytestash-frontend
msg_ok "Started Services"
else
msg_error "PLEASE MAKE A BACKUP FIRST!"
exit
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/bytestash ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Services"
systemctl stop bytestash-backend
systemctl stop bytestash-frontend
msg_ok "Services Stopped"
msg_info "Updating ${APP} to ${RELEASE}"
temp_file=$(mktemp)
curl -fsSL "https://github.com/jordan-dalby/ByteStash/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf $temp_file
rm -rf /opt/bytestash/server/node_modules
rm -rf /opt/bytestash/client/node_modules
cp -rf ByteStash-${RELEASE}/* /opt/bytestash
cd /opt/bytestash/server
$STD npm install
cd /opt/bytestash/client
$STD npm install
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP}"
msg_info "Starting Services"
systemctl start bytestash-backend
systemctl start bytestash-frontend
msg_ok "Started Services"
msg_info "Cleaning Up"
rm -f $temp_file
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
}
start
@ -70,4 +70,4 @@ description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"


@ -23,13 +23,12 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/commafeed ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/Athou/commafeed/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
if [[ "${RELEASE}" != "$(cat ~/.commafeed 2>/dev/null)" ]] || [[ ! -f ~/.commafeed ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping ${APP}"
systemctl stop commafeed
msg_ok "Stopped ${APP}"
@ -40,15 +39,13 @@ function update_script() {
$STD apt-get install -y rsync
msg_ok "Installed Dependencies"
fi
if [ -d /opt/commafeed/data ] && [ "$(ls -A /opt/commafeed/data)" ]; then
mv /opt/commafeed/data /opt/data.bak
fi
fetch_and_deploy_gh_release "commafeed" "Athou/commafeed" "prebuild" "latest" "/opt/commafeed" "commafeed-*-h2-jvm.zip"
msg_info "Updating ${APP} to ${RELEASE}"
if [ -d /opt/commafeed/data.bak ] && [ "$(ls -A /opt/commafeed/data.bak)" ]; then
mv /opt/commafeed/data.bak /opt/commafeed/data
fi
curl -fsSL "https://github.com/Athou/commafeed/releases/download/${RELEASE}/commafeed-${RELEASE}-h2-jvm.zip" -o $(basename "https://github.com/Athou/commafeed/releases/download/${RELEASE}/commafeed-${RELEASE}-h2-jvm.zip")
$STD unzip commafeed-"${RELEASE}"-h2-jvm.zip
rsync -a --exclude 'data/' commafeed-"${RELEASE}"-h2/ /opt/commafeed/
rm -rf commafeed-"${RELEASE}"-h2 commafeed-"${RELEASE}"-h2-jvm.zip
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to ${RELEASE}"
msg_info "Starting ${APP}"


@ -20,46 +20,48 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
header_info
check_container_storage
check_container_resources
if [[ ! -d "/opt/cryptpad" ]]; then
msg_error "No ${APP} Installation Found!"
if [[ ! -d "/opt/cryptpad" ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Stopping $APP"
systemctl stop cryptpad
msg_ok "Stopped $APP"
msg_info "Updating $APP to ${RELEASE}"
temp_dir=$(mktemp -d)
cp -f /opt/cryptpad/config/config.js /opt/config.js
curl -fsSL "https://github.com/cryptpad/cryptpad/archive/refs/tags/${RELEASE}.tar.gz" -o "$temp_dir/cryptpad-${RELEASE}.tar.gz"
cd "$temp_dir"
tar zxf "cryptpad-${RELEASE}.tar.gz"
cp -rf "cryptpad-${RELEASE}"/* /opt/cryptpad
cd /opt/cryptpad
$STD npm ci
$STD npm run install:components
$STD npm run build
cp -f /opt/config.js /opt/cryptpad/config/config.js
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to ${RELEASE}"
msg_info "Cleaning Up"
rm -rf $temp_dir
msg_ok "Cleanup Completed"
msg_info "Starting $APP"
systemctl start cryptpad
msg_ok "Started $APP"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ "${RELEASE}" != "$(cat ~/.cryptpad 2>/dev/null)" ]] || [[ ! -f ~/.cryptpad ]]; then
msg_info "Stopping $APP"
systemctl stop cryptpad
msg_ok "Stopped $APP"
msg_info "Backing up configuration"
[ -f /opt/cryptpad/config/config.js ] && mv /opt/cryptpad/config/config.js /opt/
msg_ok "Backed up configuration"
fetch_and_deploy_gh_release "cryptpad" "cryptpad/cryptpad"
msg_info "Updating $APP to ${RELEASE}"
cd /opt/cryptpad
$STD npm ci
$STD npm run install:components
$STD npm run build
msg_ok "Updated $APP to ${RELEASE}"
msg_info "Restoring configuration"
mv /opt/config.js /opt/cryptpad/config/
msg_ok "Configuration restored"
msg_info "Starting $APP"
systemctl start cryptpad
msg_ok "Started $APP"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
}
start


@ -29,7 +29,7 @@ function update_script() {
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/Lissy93/dashy/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
if [[ "${RELEASE}" != "$(cat ~/.dashy 2>/dev/null)" ]] || [[ ! -f ~/.dashy ]]; then
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Stopping ${APP}"
systemctl stop dashy
msg_ok "Stopped ${APP}"
@ -43,13 +43,14 @@ function update_script() {
fi
msg_ok "Backed up conf.yml"
rm -rf /opt/dashy
fetch_and_deploy_gh_release "dashy" "Lissy93/dashy"
msg_info "Updating ${APP} to ${RELEASE}"
rm -rf /opt/dashy
mkdir -p /opt/dashy
curl -fsSL "https://github.com/Lissy93/dashy/archive/refs/tags/${RELEASE}.tar.gz" | tar -xz -C /opt/dashy --strip-components=1
cd /opt/dashy
npm install
npm run build
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP} to ${RELEASE}"
msg_info "Restoring conf.yml"
@ -64,7 +65,6 @@ function update_script() {
msg_info "Starting Dashy"
systemctl start dashy
msg_ok "Started Dashy"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"


@ -20,68 +20,18 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
get_latest_release() {
curl -fsSL https://api.github.com/repos/"$1"/releases/latest | grep '"tag_name":' | cut -d'"' -f4
}
msg_info "Updating base system"
$STD apt-get update
$STD apt-get -y upgrade
msg_ok "Base system updated"
msg_info "Updating Docker Engine"
$STD apt-get install --only-upgrade -y docker-ce docker-ce-cli containerd.io
msg_ok "Docker Engine updated"
if [[ -f /usr/local/lib/docker/cli-plugins/docker-compose ]]; then
COMPOSE_BIN="/usr/local/lib/docker/cli-plugins/docker-compose"
COMPOSE_NEW_VERSION=$(get_latest_release "docker/compose")
msg_info "Updating Docker Compose to $COMPOSE_NEW_VERSION"
curl -fsSL "https://github.com/docker/compose/releases/download/${COMPOSE_NEW_VERSION}/docker-compose-$(uname -s)-$(uname -m)" \
-o "$COMPOSE_BIN"
chmod +x "$COMPOSE_BIN"
msg_ok "Docker Compose updated"
fi
if docker ps -a --format '{{.Names}}' | grep -q '^portainer$'; then
msg_info "Updating Portainer"
$STD docker pull portainer/portainer-ce:latest
$STD docker stop portainer && docker rm portainer
$STD docker volume create portainer_data >/dev/null 2>&1
$STD docker run -d \
-p 8000:8000 \
-p 9443:9443 \
--name=portainer \
--restart=always \
-v /var/run/docker.sock:/var/run/docker.sock \
-v portainer_data:/data \
portainer/portainer-ce:latest
msg_ok "Updated Portainer"
fi
if docker ps -a --format '{{.Names}}' | grep -q '^portainer_agent$'; then
msg_info "Updating Portainer Agent"
$STD docker pull portainer/agent:latest
$STD docker stop portainer_agent && docker rm portainer_agent
$STD docker run -d \
-p 9001:9001 \
--name=portainer_agent \
--restart=always \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /var/lib/docker/volumes:/var/lib/docker/volumes \
portainer/agent
msg_ok "Updated Portainer Agent"
fi
msg_info "Cleaning up"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleanup complete"
exit
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating ${APP} LXC"
$STD apt-get update
$STD apt-get -y upgrade
msg_ok "Updated ${APP} LXC"
exit
}
start
@ -89,4 +39,4 @@ build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"


@ -27,35 +27,48 @@ function update_script() {
exit
fi
if ! command -v node >/dev/null || [[ "$(/usr/bin/env node -v | grep -oP '^v\K[0-9]+')" != "22" ]]; then
NODE_VERSION="22" NODE_MODULE="pnpm@$(curl -s https://raw.githubusercontent.com/docmost/docmost/main/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
msg_info "Installing Node.js 22"
$STD apt-get purge -y nodejs
rm -f /etc/apt/sources.list.d/nodesource.list
rm -f /etc/apt/keyrings/nodesource.gpg
mkdir -p /etc/apt/keyrings
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_22.x nodistro main" >/etc/apt/sources.list.d/nodesource.list
$STD apt-get update
$STD apt-get install -y nodejs
$STD npm install -g pnpm@10.4.0
msg_ok "Node.js 22 installed"
fi
export NODE_OPTIONS="--max_old_space_size=4096"
RELEASE=$(curl -fsSL https://api.github.com/repos/docmost/docmost/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.docmost 2>/dev/null)" ]] || [[ ! -f ~/.docmost ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping ${APP}"
systemctl stop docmost
msg_ok "${APP} Stopped"
msg_info "Backing up data"
msg_info "Updating ${APP} to v${RELEASE}"
cp /opt/docmost/.env /opt/
cp -r /opt/docmost/data /opt/
rm -rf /opt/docmost
msg_ok "Data backed up"
fetch_and_deploy_gh_release "docmost" "docmost/docmost"
msg_info "Updating ${APP} to v${RELEASE}"
temp_file=$(mktemp)
curl -fsSL "https://github.com/docmost/docmost/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar -xzf "$temp_file"
mv docmost-${RELEASE} /opt/docmost
cd /opt/docmost
mv /opt/.env /opt/docmost/.env
mv /opt/data /opt/docmost/data
$STD pnpm install --force
$STD pnpm build
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ${APP}"
msg_info "Starting ${APP}"
systemctl start docmost
msg_ok "Started ${APP}"
msg_info "Cleaning Up"
rm -f ${temp_file}
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"


@ -23,26 +23,26 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/emby-server ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/MediaBrowser/Emby.Releases/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
if [[ "${RELEASE}" != "$(cat ~/.emby 2>/dev/null)" ]] || [[ ! -f ~/.emby ]]; then
msg_info "Stopping ${APP}"
systemctl stop emby-server
msg_ok "Stopped ${APP}"
LATEST=$(curl -fsSL https://api.github.com/repos/MediaBrowser/Emby.Releases/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
msg_info "Stopping ${APP}"
systemctl stop emby-server
msg_ok "Stopped ${APP}"
fetch_and_deploy_gh_release "emby" "MediaBrowser/Emby.Releases" "binary"
msg_info "Updating ${APP}"
$STD curl -fsSL "https://github.com/MediaBrowser/Emby.Releases/releases/download/${LATEST}/emby-server-deb_${LATEST}_amd64.deb" -o "emby-server-deb_${LATEST}_amd64.deb"
$STD dpkg -i "emby-server-deb_${LATEST}_amd64.deb"
rm "emby-server-deb_${LATEST}_amd64.deb"
msg_ok "Updated ${APP}"
msg_info "Starting ${APP}"
systemctl start emby-server
msg_ok "Started ${APP}"
msg_ok "Updated Successfully"
exit
fi
msg_info "Starting ${APP}"
systemctl start emby-server
msg_ok "Started ${APP}"
msg_ok "Updated Successfully"
exit
}
start


@ -20,39 +20,18 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
RELEASE=$(curl -fsSL https://www.emqx.com/en/downloads/enterprise | grep -oP '/en/downloads/enterprise/v\K[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -n1)
if [[ "$RELEASE" != "$(cat ~/.emqx 2>/dev/null)" ]] || [[ ! -f ~/.emqx ]]; then
msg_info "Stopping EMQX"
systemctl stop emqx
msg_ok "Stopped EMQX"
msg_info "Downloading EMQX v${RELEASE}"
DEB_FILE="/tmp/emqx-enterprise-${RELEASE}-debian12-amd64.deb"
curl -fsSL -o "$DEB_FILE" "https://www.emqx.com/en/downloads/enterprise/v${RELEASE}/emqx-enterprise-${RELEASE}-debian12-amd64.deb"
msg_ok "Downloaded EMQX"
msg_info "Installing EMQX"
$STD apt-get install -y "$DEB_FILE"
msg_ok "Installed EMQX v${RELEASE}"
msg_info "Starting EMQX"
systemctl start emqx
echo "$RELEASE" >~/.emqx
msg_ok "Started EMQX"
msg_info "Cleaning Up"
rm -f "$DEB_FILE"
msg_ok "Cleanup Completed"
msg_ok "Update Successful"
else
msg_ok "No update required. EMQX is already at v${RELEASE}"
fi
exit
header_info
check_container_storage
check_container_resources
if [[ ! -d /var ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
msg_info "Updating $APP LXC"
$STD apt-get update
$STD apt-get -y upgrade
msg_ok "Updated $APP LXC"
exit
}
start
@ -62,4 +41,4 @@ description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:18083${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:18083${CL}"


@ -7,7 +7,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
APP="ErsatzTV"
var_tags="${var_tags:-iptv}"
var_cpu="${var_cpu:-2}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-1024}"
var_disk="${var_disk:-5}"
var_os="${var_os:-debian}"
@ -27,18 +27,31 @@ function update_script() {
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/ErsatzTV/ErsatzTV/releases | grep -oP '"tag_name": "\K[^"]+' | head -n 1)
if [[ "${RELEASE}" != "$(cat ~/.ersatztv 2>/dev/null)" ]] || [[ ! -f ~/.ersatztv ]]; then
if [[ ! -f /opt/${APP}_version.txt && $(echo "x.x.x" >/opt/${APP}_version.txt) || "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping ErsatzTV"
systemctl stop ersatzTV
msg_ok "Stopped ErsatzTV"
FFMPEG_VERSION="latest" FFMPEG_TYPE="medium" setup_ffmpeg
fetch_and_deploy_gh_release "ersatztv" "ErsatzTV/ErsatzTV" "prebuild" "latest" "/opt/ErsatzTV" "*linux-x64.tar.gz"
msg_info "Updating ErsatzTV"
cp -R /opt/ErsatzTV/ ErsatzTV-backup
rm ErsatzTV-backup/ErsatzTV
rm -rf /opt/ErsatzTV
temp_file=$(mktemp)
curl -fsSL "https://github.com/ErsatzTV/ErsatzTV/releases/download/${RELEASE}/ErsatzTV-${RELEASE}-linux-x64.tar.gz" -o "$temp_file"
tar -xzf "$temp_file"
mv ErsatzTV-${RELEASE}-linux-x64 /opt/ErsatzTV
cp -R ErsatzTV-backup/* /opt/ErsatzTV/
rm -rf ErsatzTV-backup
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated ErsatzTV"
msg_info "Starting ErsatzTV"
systemctl start ersatzTV
msg_ok "Started ErsatzTV"
msg_info "Cleaning Up"
rm -f ${temp_file}
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"


@ -28,16 +28,20 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/excalidraw/excalidraw/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.excalidraw 2>/dev/null)" ]] || [[ ! -f ~/.excalidraw ]]; then
if [[ "${RELEASE}" != "$(cat /opt/excalidraw_version.txt)" ]] || [[ ! -f /opt/excalidraw_version.txt ]]; then
msg_info "Stopping $APP"
systemctl stop excalidraw
msg_ok "Stopped $APP"
rm -rf /opt/excalidraw
fetch_and_deploy_gh_release "excalidraw" "excalidraw/excalidraw"
msg_info "Updating $APP to v${RELEASE}"
cd /tmp
temp_file=$(mktemp)
curl -fsSL "https://github.com/excalidraw/excalidraw/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar xzf $temp_file
rm -rf /opt/excalidraw
mv excalidraw-${RELEASE} /opt/excalidraw
cd /opt/excalidraw
$STD yarn
msg_ok "Updated $APP to v${RELEASE}"
@ -46,6 +50,11 @@ function update_script() {
systemctl start excalidraw
msg_ok "Started $APP"
msg_info "Cleaning Up"
rm -rf $temp_file
msg_ok "Cleanup Completed"
echo "${RELEASE}" >/opt/excalidraw_version.txt
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"


@ -29,20 +29,17 @@ function update_script() {
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/firefly-iii/firefly-iii/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
if [[ "${RELEASE}" != "$(cat ~/.firefly 2>/dev/null)" ]] || [[ ! -f ~/.firefly ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Apache2"
systemctl stop apache2
msg_ok "Stopped Apache2"
msg_info "Backing up data"
msg_info "Updating ${APP} to v${RELEASE}"
cp /opt/firefly/.env /opt/.env
cp -r /opt/firefly/storage /opt/storage
msg_ok "Backed up data"
fetch_and_deploy_gh_release "firefly" "firefly-iii/firefly-iii"
msg_info "Updating ${APP} to v${RELEASE}"
rm -rf /opt/firefly/storage
cd /opt
curl -fsSL "https://github.com/firefly-iii/firefly-iii/releases/download/v${RELEASE}/FireflyIII-v${RELEASE}.tar.gz" -o $(basename "https://github.com/firefly-iii/firefly-iii/releases/download/v${RELEASE}/FireflyIII-v${RELEASE}.tar.gz")
tar -xzf FireflyIII-v${RELEASE}.tar.gz -C /opt/firefly --exclude='storage'
cp /opt/.env /opt/firefly/.env
cp -r /opt/storage /opt/firefly/storage
cd /opt/firefly
@ -53,12 +50,16 @@ function update_script() {
$STD php artisan view:clear
$STD php artisan firefly-iii:upgrade-database
$STD php artisan firefly-iii:laravel-passport-keys
echo "${RELEASE}" >"/opt/${APP}_version.txt"
msg_ok "Updated ${APP} to v${RELEASE}"
msg_info "Starting Apache2"
systemctl start apache2
msg_ok "Started Apache2"
msg_info "Cleaning up"
rm -rf /opt/FireflyIII-v${RELEASE}.tar.gz
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}."


@ -23,23 +23,20 @@ function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /etc/systemd/system/flaresolverr.service ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/FlareSolverr/FlareSolverr/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
if [[ "${RELEASE}" != "$(cat ~/.flaresolverr 2>/dev/null)" ]] || [[ ! -f ~/.flaresolverr ]]; then
msg_info "Stopping service"
RELEASE=$(curl -fsSL https://github.com/FlareSolverr/FlareSolverr/releases/latest | grep "title>Release" | cut -d " " -f 4)
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Updating $APP LXC"
systemctl stop flaresolverr
msg_ok "Stopped service"
rm -rf /opt/flaresolverr
fetch_and_deploy_gh_release "flaresolverr" "FlareSolverr/FlareSolverr" "prebuild" "latest" "/opt/flaresolverr" "flaresolverr_linux_x64.tar.gz"
msg_info "Starting service"
curl -fsSL "https://github.com/FlareSolverr/FlareSolverr/releases/download/$RELEASE/flaresolverr_linux_x64.tar.gz" -o $(basename "https://github.com/FlareSolverr/FlareSolverr/releases/download/$RELEASE/flaresolverr_linux_x64.tar.gz")
tar -xzf flaresolverr_linux_x64.tar.gz -C /opt
rm flaresolverr_linux_x64.tar.gz
systemctl start flaresolverr
msg_ok "Started service"
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP LXC"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi


@ -20,43 +20,51 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
header_info
check_container_storage
check_container_resources
if [[ ! -d /opt/fluid-calendar ]]; then
msg_error "No ${APP} Installation Found!"
if [[ ! -d /opt/fluid-calendar ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/dotnetfactory/fluid-calendar/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Stopping $APP"
systemctl stop fluid-calendar.service
msg_ok "Stopped $APP"
msg_info "Updating $APP to v${RELEASE}"
cp /opt/fluid-calendar/.env /opt/fluid.env
rm -rf /opt/fluid-calendar
tmp_file=$(mktemp)
curl -fsSL "https://github.com/dotnetfactory/fluid-calendar/archive/refs/tags/v${RELEASE}.zip" -o "$tmp_file"
$STD unzip $tmp_file
mv ${APP}-${RELEASE}/ /opt/fluid-calendar
mv /opt/fluid.env /opt/fluid-calendar/.env
cd /opt/fluid-calendar
export NEXT_TELEMETRY_DISABLED=1
$STD npm install --legacy-peer-deps
$STD npm run prisma:generate
$STD npx prisma migrate deploy
$STD npm run build:os
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting $APP"
systemctl start fluid-calendar.service
msg_ok "Started $APP"
msg_info "Cleaning Up"
rm -rf $tmp_file
msg_ok "Cleanup Completed"
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/dotnetfactory/fluid-calendar/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.fluid-calendar 2>/dev/null)" ]] || [[ ! -f ~/.fluid-calendar ]]; then
msg_info "Stopping $APP"
systemctl stop fluid-calendar
msg_ok "Stopped $APP"
cp /opt/fluid-calendar/.env /opt/fluid.env
rm -rf /opt/fluid-calendar
fetch_and_deploy_gh_release "fluid-calendar" "dotnetfactory/fluid-calendar"
msg_info "Updating $APP to v${RELEASE}"
mv /opt/fluid.env /opt/fluid-calendar/.env
cd /opt/fluid-calendar
export NEXT_TELEMETRY_DISABLED=1
$STD npm install --legacy-peer-deps
$STD npm run prisma:generate
$STD npx prisma migrate deploy
$STD npm run build:os
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting $APP"
systemctl start fluid-calendar
msg_ok "Started $APP"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
exit
}
start


@ -29,29 +29,35 @@ function update_script() {
exit
fi
RELEASE=$(curl -s https://api.github.com/repos/TwiN/gatus/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.gatus 2>/dev/null)" ]] || [[ ! -f ~/.gatus ]]; then
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Updating $APP"
msg_info "Stopping $APP"
systemctl stop gatus
msg_ok "Stopped $APP"
mv /opt/gatus/config/config.yaml /opt
rm -rf /opt/gatus
fetch_and_deploy_gh_release "gatus" "TwiN/gatus"
msg_info "Updating $APP to v${RELEASE}"
mv /opt/gatus/config/config.yaml /opt
rm -rf /opt/gatus/*
temp_file=$(mktemp)
curl -fsSL "https://github.com/TwiN/gatus/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf "$temp_file" --strip-components=1 -C /opt/gatus
cd /opt/gatus
$STD go mod tidy
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o gatus .
setcap CAP_NET_RAW+ep gatus
mv /opt/config.yaml config
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to v${RELEASE}"
msg_info "Starting $APP"
systemctl start gatus
msg_ok "Started $APP"
msg_info "Cleaning Up"
rm -f "$temp_file"
msg_ok "Cleanup Completed"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"


@ -20,31 +20,26 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
header_info
check_container_storage
check_container_resources
msg_info "Updating ${APP} LXC"
if ! dpkg-query -W -f='${Status}' mariadb-server 2>/dev/null | grep -q "install ok installed"; then
setup_mysql
fi
NODE_VERSION="22" setup_nodejs
msg_info "Updating ${APP} LXC"
if command -v ghost &>/dev/null; then
current_version=$(ghost version | grep 'Ghost-CLI version' | awk '{print $3}')
latest_version=$(npm show ghost-cli version)
if [ "$current_version" != "$latest_version" ]; then
msg_info "Updating ${APP} from version v${current_version} to v${latest_version}"
$STD npm install -g ghost-cli@latest
msg_ok "Updated Successfully"
if command -v ghost &>/dev/null; then
current_version=$(ghost version | grep 'Ghost-CLI version' | awk '{print $3}')
latest_version=$(npm show ghost-cli version)
if [ "$current_version" != "$latest_version" ]; then
msg_info "Updating ${APP} from version v${current_version} to v${latest_version}"
$STD npm install -g ghost-cli@latest
msg_ok "Updated Successfully"
else
msg_ok "${APP} is already at v${current_version}"
fi
else
msg_ok "${APP} is already at v${current_version}"
msg_error "No ${APP} Installation Found!"
exit
fi
else
msg_error "No ${APP} Installation Found!"
exit
fi
exit
}
start
@ -54,4 +49,4 @@ description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:2368${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:2368${CL}"

View File

@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2025 community-scripts ORG
# Author: CrazyWolf13
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/RayLabsHQ/gitea-mirror
# Source: https://github.com/arunavo4/gitea-mirror
APP="gitea-mirror"
var_tags="${var_tags:-mirror;gitea}"
@ -28,7 +28,7 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/RayLabsHQ/gitea-mirror/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
RELEASE=$(curl -fsSL https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.${APP} 2>/dev/null || cat /opt/${APP}_version.txt 2>/dev/null)" ]]; then
msg_info "Stopping Services"
@ -48,7 +48,7 @@ function update_script() {
msg_ok "Installed Bun"
rm -rf /opt/gitea-mirror
fetch_and_deploy_gh_release "gitea-mirror" "RayLabsHQ/gitea-mirror"
fetch_and_deploy_gh_release "gitea-mirror" "arunavo4/gitea-mirror"
msg_info "Updating and rebuilding ${APP} to v${RELEASE}"
cd /opt/gitea-mirror

View File

@ -20,33 +20,24 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
if [[ ! -f /usr/local/bin/gitea ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://github.com/go-gitea/gitea/releases/latest | grep "title>Release" | cut -d " " -f 4 | sed 's/^v//')
if [[ "${RELEASE}" != "$(cat ~/.gitea 2>/dev/null)" ]] || [[ ! -f ~/.gitea ]]; then
msg_info "Stopping service"
systemctl stop gitea
msg_ok "Service stopped"
rm -rf /usr/local/bin/gitea
fetch_and_deploy_gh_release "gitea" "go-gitea/gitea" "singlefile" "latest" "/usr/local/bin" "gitea-*-linux-amd64"
chmod +x /usr/local/bin/gitea
msg_info "Starting service"
systemctl start gitea
msg_ok "Started service"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
header_info
check_container_storage
check_container_resources
if [[ ! -f /usr/local/bin/gitea ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://github.com/go-gitea/gitea/releases/latest | grep "title>Release" | cut -d " " -f 4 | sed 's/^v//')
msg_info "Updating $APP to ${RELEASE}"
FILENAME="gitea-$RELEASE-linux-amd64"
curl -fsSL "https://github.com/go-gitea/gitea/releases/download/v$RELEASE/gitea-$RELEASE-linux-amd64" -o $FILENAME
systemctl stop gitea
rm -rf /usr/local/bin/gitea
mv $FILENAME /usr/local/bin/gitea
chmod +x /usr/local/bin/gitea
systemctl start gitea
msg_ok "Updated $APP Successfully"
exit
}
start
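
# Note: several hunks in this compare swap manual curl/tar steps for the
# fetch_and_deploy_gh_release helper. Its definition is not part of this diff, so the
# argument roles noted below are inferred from usage only; this is an illustrative
# summary of the call forms that appear in these files, not documentation of the helper.
fetch_and_deploy_gh_release "gatus" "TwiN/gatus"                                   # two-argument form: latest release into /opt/<app> (inferred)
fetch_and_deploy_gh_release "gitea" "go-gitea/gitea" "singlefile" "latest" \
  "/usr/local/bin" "gitea-*-linux-amd64"                                            # single release asset matched by pattern, placed at the target path
fetch_and_deploy_gh_release "glance" "glanceapp/glance" "prebuild" "latest" \
  "/opt/glance" "glance-linux-amd64.tar.gz"                                         # prebuilt archive unpacked into the target directory
fetch_and_deploy_gh_release "habitica" "HabitRPG/habitica" "tarball" "latest" "/opt/habitica"   # source tarball of the tagged release
fetch_and_deploy_gh_release "authelia" "authelia/authelia" "binary"                 # packaged binary/.deb install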

View File

@ -28,19 +28,28 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/glanceapp/glance/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.glance 2>/dev/null)" ]] || [[ ! -f ~/.glance ]]; then
if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
msg_info "Stopping Service"
systemctl stop glance
msg_ok "Stopped Service"
rm -f /opt/glance/glance
fetch_and_deploy_gh_release "glance" "glanceapp/glance" "prebuild" "latest" "/opt/glance" "glance-linux-amd64.tar.gz"
msg_info "Updating ${APP} to v${RELEASE}"
cd /opt
curl -fsSL "https://github.com/glanceapp/glance/releases/download/v${RELEASE}/glance-linux-amd64.tar.gz" -o $(basename "https://github.com/glanceapp/glance/releases/download/v${RELEASE}/glance-linux-amd64.tar.gz")
rm -rf /opt/glance/glance
tar -xzf glance-linux-amd64.tar.gz -C /opt/glance
echo "${RELEASE}" >"/opt/${APP}_version.txt"
msg_ok "Updated ${APP} to v${RELEASE}"
msg_info "Starting Service"
systemctl start glance
msg_ok "Started Service"
msg_info "Cleaning up"
rm -rf /opt/glance-linux-amd64.tar.gz
msg_ok "Cleaned"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}."

View File

@ -20,61 +20,48 @@ color
catch_errors
function update_script() {
header_info
check_container_storage
check_container_resources
header_info
check_container_storage
check_container_resources
if [[ ! -d "/opt/habitica" ]]; then
msg_error "No ${APP} Installation Found!"
if [[ ! -d "/opt/habitica" ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/HabitRPG/habitica/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
msg_info "Stopping $APP"
systemctl stop habitica-mongodb
systemctl stop habitica
systemctl stop habitica-client
msg_ok "Stopped $APP"
msg_info "Updating $APP to ${RELEASE}"
temp_file=$(mktemp)
curl -fsSL "https://github.com/HabitRPG/habitica/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf $temp_file
cp -rf habitica-${RELEASE}/* /opt/habitica
cd /opt/habitica
$STD npm i
echo "${RELEASE}" >/opt/${APP}_version.txt
msg_ok "Updated $APP to ${RELEASE}"
msg_info "Starting $APP"
systemctl start habitica-mongodb
systemctl start habitica
systemctl start habitica-client
msg_ok "Started $APP"
msg_info "Cleaning Up"
rm -f $temp_file
rm -rf ~/habitica-${RELEASE}
msg_ok "Cleanup Completed"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
fi
NODE_VERSION="20" NODE_MODULE="gulp-cli,mocha" setup_nodejs
RELEASE=$(curl -fsSL https://api.github.com/repos/HabitRPG/habitica/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.habitica 2>/dev/null)" ]] || [[ ! -f ~/.habitica ]]; then
msg_info "Stopping $APP"
systemctl stop habitica-mongodb
systemctl stop habitica
systemctl stop habitica-client
msg_ok "Stopped $APP"
msg_info "Save configuration"
if [[ -f /opt/habitica/config.json ]]; then
cp /opt/habitica/config.json ~/config.json
msg_ok "Saved configuration"
else
msg_warn "No configuration file found, skipping save"
fi
fetch_and_deploy_gh_release "habitica" "HabitRPG/habitica" "tarball" "latest" "/opt/habitica"
msg_info "Updating $APP to ${RELEASE}"
cd /opt/habitica
$STD npm i
$STD npm run postinstall
$STD npm run client:build
$STD gulp build:prod
msg_ok "Updated $APP to ${RELEASE}"
msg_info "Restoring configuration"
if [[ -f ~/config.json ]]; then
cp ~/config.json /opt/habitica/config.json
msg_ok "Restored configuration"
else
msg_warn "No configuration file found to restore"
fi
msg_info "Starting $APP"
systemctl start habitica-mongodb
systemctl start habitica
systemctl start habitica-client
msg_ok "Started $APP"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at ${RELEASE}"
fi
exit
}
start
@ -84,4 +71,4 @@ description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"

View File

@ -1,6 +1,6 @@
______ _ ___
/_ __/____(_) (_)_ ______ ___
/ / / ___/ / / / / / / __ `__ \
/ / / / / / / / /_/ / / / / / /
/_/ /_/ /_/_/_/\__,_/_/ /_/ /_/
______ _ ___ _ __ __
/_ __/____(_) (_)_ ______ ___ / | / /___ / /____ _____
/ / / ___/ / / / / / / __ `__ \ / |/ / __ \/ __/ _ \/ ___/
/ / / / / / / / /_/ / / / / / / / /| / /_/ / /_/ __(__ )
/_/ /_/ /_/_/_/\__,_/_/ /_/ /_/ /_/ |_/\____/\__/\___/____/

View File

@ -51,12 +51,134 @@ function update_script() {
fi
if [[ -f ~/.immich_library_revisions ]]; then
libraries=("libjxl" "libheif" "libraw" "imagemagick" "libvips")
cd "$BASE_DIR"
$STD git pull
for library in "${libraries[@]}"; do
compile_"$library"
done
msg_ok "Image-processing libraries updated"
readarray -d '' NEW_REVISIONS < <(for library in "${libraries[@]}"; do
echo "$library: $(curl -fsSL https://raw.githubusercontent.com/immich-app/base-images/refs/heads/main/server/sources/"$library".json | jq -cr '.revision' -)"
done)
UPDATED_REVISIONS="$(comm -13 <(sort ~/.immich_library_revisions) <(echo -n "${NEW_REVISIONS[@]}" | sort))"
if [[ "$UPDATED_REVISIONS" ]]; then
readarray -t NAMES < <(echo "$UPDATED_REVISIONS" | awk -F ':' '{print $1}')
rm -rf "$SOURCE_DIR"
mkdir -p "$SOURCE_DIR"
cd "$BASE_DIR"
$STD git pull
cd "$STAGING_DIR"
for name in "${NAMES[@]}"; do
if [[ "$name" == "libjxl" ]]; then
msg_info "Recompiling libjxl"
SOURCE=${SOURCE_DIR}/libjxl
JPEGLI_LIBJPEG_LIBRARY_SOVERSION="62"
JPEGLI_LIBJPEG_LIBRARY_VERSION="62.3.0"
: "${LIBJXL_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libjxl.json)}"
$STD git clone https://github.com/libjxl/libjxl.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBJXL_REVISION"
$STD git submodule update --init --recursive --depth 1 --recommend-shallow
$STD git apply "$BASE_DIR"/server/sources/libjxl-patches/jpegli-empty-dht-marker.patch
$STD git apply "$BASE_DIR"/server/sources/libjxl-patches/jpegli-icc-warning.patch
mkdir build
cd build
$STD cmake \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_TESTING=OFF \
-DJPEGXL_ENABLE_DOXYGEN=OFF \
-DJPEGXL_ENABLE_MANPAGES=OFF \
-DJPEGXL_ENABLE_PLUGIN_GIMP210=OFF \
-DJPEGXL_ENABLE_BENCHMARK=OFF \
-DJPEGXL_ENABLE_EXAMPLES=OFF \
-DJPEGXL_FORCE_SYSTEM_BROTLI=ON \
-DJPEGXL_FORCE_SYSTEM_HWY=ON \
-DJPEGXL_ENABLE_JPEGLI=ON \
-DJPEGXL_ENABLE_JPEGLI_LIBJPEG=ON \
-DJPEGXL_INSTALL_JPEGLI_LIBJPEG=ON \
-DJPEGXL_ENABLE_PLUGINS=ON \
-DJPEGLI_LIBJPEG_LIBRARY_SOVERSION="$JPEGLI_LIBJPEG_LIBRARY_SOVERSION" \
-DJPEGLI_LIBJPEG_LIBRARY_VERSION="$JPEGLI_LIBJPEG_LIBRARY_VERSION" \
-DLIBJPEG_TURBO_VERSION_NUMBER=2001005 \
..
$STD cmake --build . -- -j"$(nproc)"
$STD cmake --install .
ldconfig /usr/local/lib
$STD make clean
cd "$STAGING_DIR"
rm -rf "$SOURCE"/{build,third_party}
msg_ok "Recompiled libjxl"
fi
if [[ "$name" == "libheif" ]]; then
msg_info "Recompiling libheif"
SOURCE=${SOURCE_DIR}/libheif
: "${LIBHEIF_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libheif.json)}"
$STD git clone https://github.com/strukturag/libheif.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBHEIF_REVISION"
mkdir build
cd build
$STD cmake --preset=release-noplugins \
-DWITH_DAV1D=ON \
-DENABLE_PARALLEL_TILE_DECODING=ON \
-DWITH_LIBSHARPYUV=ON \
-DWITH_LIBDE265=ON \
-DWITH_AOM_DECODER=OFF \
-DWITH_AOM_ENCODER=OFF \
-DWITH_X265=OFF \
-DWITH_EXAMPLES=OFF \
..
$STD make install -j "$(nproc)"
ldconfig /usr/local/lib
$STD make clean
cd "$STAGING_DIR"
rm -rf "$SOURCE"/build
msg_ok "Recompiled libheif"
fi
if [[ "$name" == "libraw" ]]; then
msg_info "Recompiling libraw"
SOURCE=${SOURCE_DIR}/libraw
: "${LIBRAW_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libraw.json)}"
$STD git clone https://github.com/libraw/libraw.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBRAW_REVISION"
$STD autoreconf --install
$STD ./configure
$STD make -j"$(nproc)"
$STD make install
ldconfig /usr/local/lib
$STD make clean
cd "$STAGING_DIR"
msg_ok "Recompiled libraw"
fi
if [[ "$name" == "imagemagick" ]]; then
msg_info "Recompiling ImageMagick"
SOURCE=$SOURCE_DIR/imagemagick
: "${IMAGEMAGICK_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/imagemagick.json)}"
$STD git clone https://github.com/ImageMagick/ImageMagick.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$IMAGEMAGICK_REVISION"
$STD ./configure --with-modules
$STD make -j"$(nproc)"
$STD make install
ldconfig /usr/local/lib
$STD make clean
cd "$STAGING_DIR"
msg_ok "Recompiled ImageMagick"
fi
if [[ "$name" == "libvips" ]]; then
msg_info "Recompiling libvips"
SOURCE=$SOURCE_DIR/libvips
: "${LIBVIPS_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libvips.json)}"
$STD git clone https://github.com/libvips/libvips.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBVIPS_REVISION"
$STD meson setup build --buildtype=release --libdir=lib -Dintrospection=disabled -Dtiff=disabled
cd build
$STD ninja install
ldconfig /usr/local/lib
cd "$STAGING_DIR"
rm -rf "$SOURCE"/build
msg_ok "Recompiled libvips"
fi
done
echo -n "${NEW_REVISIONS[@]}" >~/.immich_library_revisions
msg_ok "Image-processing libraries compiled"
fi
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/immich-app/immich/releases?per_page=1 | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ -f ~/.immich && "$RELEASE" == "$(cat ~/.immich)" ]]; then
@ -123,10 +245,6 @@ function update_script() {
cp -a server/{node_modules,dist,bin,resources,package.json,package-lock.json,start*.sh} "$APP_DIR"/
cp -a web/build "$APP_DIR"/www
cp LICENSE "$APP_DIR"
cd "$APP_DIR"
export SHARP_FORCE_GLOBAL_LIBVIPS=true
$STD npm install sharp
rm -rf "$APP_DIR"/node_modules/@img/sharp-{libvips*,linuxmusl-x64}
msg_ok "Updated ${APP} web and microservices"
cd "$SRC_DIR"/machine-learning
@ -158,6 +276,8 @@ function update_script() {
ln -s "$GEO_DIR" "$APP_DIR"
msg_info "Updating Immich CLI"
$STD npm install --build-from-source sharp
rm -rf "$APP_DIR"/node_modules/@img/sharp-{libvips*,linuxmusl-x64}
$STD npm i -g @immich/cli
msg_ok "Updated Immich CLI"
@ -173,144 +293,6 @@ function update_script() {
exit
}
function compile_libjxl() {
SOURCE=${SOURCE_DIR}/libjxl
JPEGLI_LIBJPEG_LIBRARY_SOVERSION="62"
JPEGLI_LIBJPEG_LIBRARY_VERSION="62.3.0"
: "${LIBJXL_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libjxl.json)}"
if [[ "${update:-}" ]] || [[ "$LIBJXL_REVISION" != "$(grep 'libjxl' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
msg_info "Recompiling libjxl"
if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
$STD git clone https://github.com/libjxl/libjxl.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBJXL_REVISION"
$STD git submodule update --init --recursive --depth 1 --recommend-shallow
$STD git apply "$BASE_DIR"/server/sources/libjxl-patches/jpegli-empty-dht-marker.patch
$STD git apply "$BASE_DIR"/server/sources/libjxl-patches/jpegli-icc-warning.patch
mkdir build
cd build
$STD cmake \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_TESTING=OFF \
-DJPEGXL_ENABLE_DOXYGEN=OFF \
-DJPEGXL_ENABLE_MANPAGES=OFF \
-DJPEGXL_ENABLE_PLUGIN_GIMP210=OFF \
-DJPEGXL_ENABLE_BENCHMARK=OFF \
-DJPEGXL_ENABLE_EXAMPLES=OFF \
-DJPEGXL_FORCE_SYSTEM_BROTLI=ON \
-DJPEGXL_FORCE_SYSTEM_HWY=ON \
-DJPEGXL_ENABLE_JPEGLI=ON \
-DJPEGXL_ENABLE_JPEGLI_LIBJPEG=ON \
-DJPEGXL_INSTALL_JPEGLI_LIBJPEG=ON \
-DJPEGXL_ENABLE_PLUGINS=ON \
-DJPEGLI_LIBJPEG_LIBRARY_SOVERSION="$JPEGLI_LIBJPEG_LIBRARY_SOVERSION" \
-DJPEGLI_LIBJPEG_LIBRARY_VERSION="$JPEGLI_LIBJPEG_LIBRARY_VERSION" \
-DLIBJPEG_TURBO_VERSION_NUMBER=2001005 \
..
$STD cmake --build . -- -j"$(nproc)"
$STD cmake --install .
ldconfig /usr/local/lib
$STD make clean
cd "$STAGING_DIR"
rm -rf "$SOURCE"/{build,third_party}
msg_ok "Recompiled libjxl"
fi
}
function compile_libheif() {
SOURCE=${SOURCE_DIR}/libheif
if ! dpkg -l | grep -q libaom; then
$STD apt-get install -y libaom-dev
local update="required"
fi
: "${LIBHEIF_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libheif.json)}"
if [[ "${update:-}" ]] || [[ "$LIBHEIF_REVISION" != "$(grep 'libheif' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
msg_info "Recompiling libheif"
if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
$STD git clone https://github.com/strukturag/libheif.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBHEIF_REVISION"
mkdir build
cd build
$STD cmake --preset=release-noplugins \
-DWITH_DAV1D=ON \
-DENABLE_PARALLEL_TILE_DECODING=ON \
-DWITH_LIBSHARPYUV=ON \
-DWITH_LIBDE265=ON \
-DWITH_AOM_DECODER=OFF \
-DWITH_AOM_ENCODER=ON \
-DWITH_X265=OFF \
-DWITH_EXAMPLES=OFF \
..
$STD make install -j "$(nproc)"
ldconfig /usr/local/lib
$STD make clean
cd "$STAGING_DIR"
rm -rf "$SOURCE"/build
msg_ok "Recompiled libheif"
fi
}
function compile_libraw() {
SOURCE=${SOURCE_DIR}/libraw
local update
: "${LIBRAW_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libraw.json)}"
if [[ "${update:-}" ]] || [[ "$LIBRAW_REVISION" != "$(grep 'libraw' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
msg_info "Recompiling libraw"
if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
$STD git clone https://github.com/libraw/libraw.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBRAW_REVISION"
$STD autoreconf --install
$STD ./configure
$STD make -j"$(nproc)"
$STD make install
ldconfig /usr/local/lib
$STD make clean
cd "$STAGING_DIR"
msg_ok "Recompiled libraw"
fi
}
function compile_imagemagick() {
SOURCE=$SOURCE_DIR/imagemagick
: "${IMAGEMAGICK_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/imagemagick.json)}"
if [[ "${update:-}" ]] || [[ "$IMAGEMAGICK_REVISION" != "$(grep 'imagemagick' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
msg_info "Recompiling ImageMagick"
if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
$STD git clone https://github.com/ImageMagick/ImageMagick.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$IMAGEMAGICK_REVISION"
$STD ./configure --with-modules
$STD make -j"$(nproc)"
$STD make install
ldconfig /usr/local/lib
$STD make clean
cd "$STAGING_DIR"
msg_ok "Recompiled ImageMagick"
fi
}
function compile_libvips() {
SOURCE=$SOURCE_DIR/libvips
# : "${LIBVIPS_REVISION:=$(jq -cr '.revision' "$BASE_DIR"/server/sources/libvips.json)}"
: "${LIBVIPS_REVISION:=8fa37a64547e392d3808eed8d72adab7e02b3d00}"
if [[ "${update:-}" ]] || [[ "$LIBVIPS_REVISION" != "$(grep 'libvips' ~/.immich_library_revisions | awk '{print $2}')" ]]; then
msg_info "Recompiling libvips"
if [[ -d "$SOURCE" ]]; then rm -rf "$SOURCE"; fi
$STD git clone https://github.com/libvips/libvips.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBVIPS_REVISION"
$STD meson setup build --buildtype=release --libdir=lib -Dintrospection=disabled -Dtiff=disabled
cd build
$STD ninja install
ldconfig /usr/local/lib
cd "$STAGING_DIR"
rm -rf "$SOURCE"/build
msg_ok "Recompiled libvips"
fi
}
start
build_container
description
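
# Note: the library-rebuild branch above decides which libraries to recompile by
# comparing the saved revision list against freshly fetched revisions with comm -13
# (print lines unique to the second, sorted input). A small standalone illustration of
# that comparison, with invented revision strings:
printf '%s\n' "libheif: aaa111" "libraw: bbb222" | sort > /tmp/old_revisions
NEW_REVISIONS=$(printf '%s\n' "libheif: ccc333" "libraw: bbb222" | sort)
# comm -13 suppresses lines unique to the first input and common lines,
# leaving only libraries whose revision changed.
UPDATED_REVISIONS="$(comm -13 /tmp/old_revisions <(echo "$NEW_REVISIONS"))"
echo "$UPDATED_REVISIONS"                                          # -> libheif: ccc333
readarray -t NAMES < <(echo "$UPDATED_REVISIONS" | awk -F ':' '{print $1}')
echo "${NAMES[@]}"                                                 # -> libheif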

View File

@ -9,7 +9,7 @@ APP="karakeep"
var_tags="${var_tags:-bookmark}"
var_cpu="${var_cpu:-2}"
var_ram="${var_ram:-4096}"
var_disk="${var_disk:-10}"
var_disk="${var_disk:-14}"
var_os="${var_os:-debian}"
var_version="${var_version:-12}"
var_unprivileged="${var_unprivileged:-1}"

View File

@ -27,31 +27,18 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/hywax/mafl/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
if [[ "${RELEASE}" != "$(cat ~/.mafl 2>/dev/null)" ]] || [[ ! -f ~/.mafl ]]; then
msg_info "Stopping Mafl service"
systemctl stop mafl
msg_ok "Service stopped"
msg_info "Performing backup"
mkdir -p /opt/mafl-backup/data
mv /opt/mafl/data /opt/mafl-backup/data
rm -rf /opt/mafl
msg_ok "Backup complete"
fetch_and_deploy_gh_release "mafl" "hywax/mafl"
msg_info "Updating Mafl to v${RELEASE}"
cd /opt/mafl
yarn install
yarn build
mv /opt/mafl-backup/data /opt/mafl/data
systemctl start mafl
msg_ok "Updated Mafl to v${RELEASE}"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
msg_info "Updating Mafl to v${RELEASE} (Patience)"
systemctl stop mafl
curl -fsSL "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz" -o $(basename "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz")
tar -xzf v${RELEASE}.tar.gz
cp -r mafl-${RELEASE}/* /opt/mafl/
rm -rf mafl-${RELEASE}
cd /opt/mafl
yarn install
yarn build
systemctl start mafl
msg_ok "Updated Mafl to v${RELEASE}"
exit
}

View File

@ -40,7 +40,6 @@ function update_script() {
msg_info "Updating Ollama to ${RELEASE}"
rm -rf /usr/local/lib/ollama
rm -rf /usr/local/bin/ollama
mkdir -p /usr/local/lib/ollama
tar -xzf "${TMP_TAR}" -C /usr/local/lib/ollama
ln -sf /usr/local/lib/ollama/bin/ollama /usr/local/bin/ollama
echo "${RELEASE}" >/opt/Ollama_version.txt

View File

@ -66,7 +66,6 @@ function update_script() {
$STD dpkg -i "$OMADA_PKG"
rm -f "$OMADA_PKG"
msg_ok "Updated Omada Controller"
exit 0
}
start

View File

@ -40,20 +40,20 @@ function update_script() {
mkdir -p /opt/planka-backup/user-avatars
mkdir -p /opt/planka-backup/background-images
mkdir -p /opt/planka-backup/attachments
mv /opt/planka/.env /opt/planka-backup
[ -n "$(ls -A /opt/planka/public/favicons 2>/dev/null)" ] && mv /opt/planka/public/favicons/* /opt/planka-backup/favicons/
[ -n "$(ls -A /opt/planka/public/user-avatars 2>/dev/null)" ] && mv /opt/planka/public/user-avatars/* /opt/planka-backup/user-avatars/
[ -n "$(ls -A /opt/planka/public/background-images 2>/dev/null)" ] && mv /opt/planka/public/background-images/* /opt/planka-backup/background-images/
[ -n "$(ls -A /opt/planka/private/attachments 2>/dev/null)" ] && mv /opt/planka/private/attachments/* /opt/planka-backup/attachments/
mv /opt/planka/planka/.env /opt/planka-backup
[ -n "$(ls -A /opt/planka/planka/public/favicons 2>/dev/null)" ] && mv /opt/planka/planka/public/favicons/* /opt/planka-backup/favicons/
[ -n "$(ls -A /opt/planka/planka/public/user-avatars 2>/dev/null)" ] && mv /opt/planka/planka/public/user-avatars/* /opt/planka-backup/user-avatars/
[ -n "$(ls -A /opt/planka/planka/public/background-images 2>/dev/null)" ] && mv /opt/planka/planka/public/background-images/* /opt/planka-backup/background-images/
[ -n "$(ls -A /opt/planka/planka/private/attachments 2>/dev/null)" ] && mv /opt/planka/planka/private/attachments/* /opt/planka-backup/attachments/
rm -rf /opt/planka
fetch_and_deploy_gh_release "planka" "plankanban/planka" "prebuild" "latest" "/opt/planka" "planka-prebuild.zip"
cd /opt/planka
cd /opt/planka/planka
$STD npm install
mv /opt/planka-backup/.env /opt/planka/
[ -n "$(ls -A /opt/planka-backup/favicons 2>/dev/null)" ] && mv /opt/planka-backup/favicons/* /opt/planka/public/favicons/
[ -n "$(ls -A /opt/planka-backup/user-avatars 2>/dev/null)" ] && mv /opt/planka-backup/user-avatars/* /opt/planka/public/user-avatars/
[ -n "$(ls -A /opt/planka-backup/background-images 2>/dev/null)" ] && mv /opt/planka-backup/background-images/* /opt/planka/public/background-images/
[ -n "$(ls -A /opt/planka-backup/attachments 2>/dev/null)" ] && mv /opt/planka-backup/attachments/* /opt/planka/private/attachments/
mv /opt/planka-backup/.env /opt/planka/planka/
[ -n "$(ls -A /opt/planka-backup/favicons 2>/dev/null)" ] && mv /opt/planka-backup/favicons/* /opt/planka/planka/public/favicons/
[ -n "$(ls -A /opt/planka-backup/user-avatars 2>/dev/null)" ] && mv /opt/planka-backup/user-avatars/* /opt/planka/planka/public/user-avatars/
[ -n "$(ls -A /opt/planka-backup/background-images 2>/dev/null)" ] && mv /opt/planka-backup/background-images/* /opt/planka/planka/public/background-images/
[ -n "$(ls -A /opt/planka-backup/attachments 2>/dev/null)" ] && mv /opt/planka-backup/attachments/* /opt/planka/planka/private/attachments/
msg_ok "Updated $APP to ${RELEASE}"
msg_info "Starting $APP"

View File

@ -86,4 +86,4 @@ msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Configure your reverse proxy to point to:${BGN} ${IP}:1411${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}https://{PUBLIC_URL}/setup${CL}"
echo -e "${TAB}${GATEWAY}${BGN}https://{PUBLIC_URL}/login/setup${CL}"

View File

@ -58,7 +58,6 @@ function update_script() {
else
msg_ok "No update required. ${APP} is already at ${RELEASE}."
fi
exit
}
start

View File

@ -27,41 +27,29 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/Stirling-Tools/Stirling-PDF/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ "${RELEASE}" != "$(cat ~/.stirling-pdf 2>/dev/null)" ]] || [[ ! -f ~/.stirling-pdf ]]; then
if [[ ! -f /etc/systemd/system/unoserver.service ]]; then
msg_custom "⚠️ " "\e[33m" "Legacy installation detected please recreate the container using the latest install script."
exit 0
fi
PYTHON_VERSION="3.12" setup_uv
JAVA_VERSION="21" setup_java
msg_info "Stopping Services"
systemctl stop stirlingpdf libreoffice-listener unoserver
msg_ok "Stopped Services"
if [[ -f ~/.Stirling-PDF-login ]]; then
USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "stirling-pdf" "Stirling-Tools/Stirling-PDF" "singlefile" "latest" "/opt/Stirling-PDF" "Stirling-PDF-with-login.jar"
mv /opt/Stirling-PDF/Stirling-PDF-with-login.jar /opt/Stirling-PDF/Stirling-PDF.jar
else
USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "stirling-pdf" "Stirling-Tools/Stirling-PDF" "singlefile" "latest" "/opt/Stirling-PDF" "Stirling-PDF.jar"
fi
msg_info "Refreshing Font Cache"
$STD fc-cache -fv
msg_ok "Font Cache Updated"
msg_info "Starting Services"
systemctl start stirlingpdf libreoffice-listener unoserver
msg_ok "Started Services"
msg_ok "Update Successful"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
msg_info "Updating ${APP}"
systemctl stop stirlingpdf
if [[ -n $(dpkg -l | grep -w ocrmypdf) ]] && [[ -z $(dpkg -l | grep -w qpdf) ]]; then
$STD apt-get remove -y ocrmypdf
$STD apt-get install -y qpdf
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/Stirling-Tools/Stirling-PDF/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/Stirling-Tools/Stirling-PDF/archive/refs/tags/v$RELEASE.tar.gz" -o $(basename "https://github.com/Stirling-Tools/Stirling-PDF/archive/refs/tags/v$RELEASE.tar.gz")
tar -xzf v$RELEASE.tar.gz
cd Stirling-PDF-$RELEASE
chmod +x ./gradlew
$STD ./gradlew build
rm -rf /opt/Stirling-PDF/Stirling-PDF-*.jar
cp -r ./build/libs/Stirling-PDF-*.jar /opt/Stirling-PDF/
cp -r scripts /opt/Stirling-PDF/
cd ~
rm -rf Stirling-PDF-$RELEASE v$RELEASE.tar.gz
ln -sf /opt/Stirling-PDF/Stirling-PDF-$RELEASE.jar /opt/Stirling-PDF/Stirling-PDF.jar
systemctl start stirlingpdf
msg_ok "Updated ${APP} to v$RELEASE"
exit
}
start
build_container
description

View File

@ -27,24 +27,12 @@ function update_script() {
msg_error "No ${APP} Installation Found!"
exit
fi
RELEASE=$(curl -fsSL https://api.github.com/repos/threadfin/threadfin/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
if [[ "${RELEASE}" != "$(cat ~/.threadfin 2>/dev/null)" ]] || [[ ! -f ~/.threadfin ]]; then
msg_info "Stopping $APP"
systemctl stop threadfin
msg_ok "Stopped $APP"
fetch_and_deploy_gh_release "threadfin" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
msg_info "Starting $APP"
systemctl start threadfin
msg_ok "Started $APP"
msg_ok "Updated Successfully"
else
msg_ok "No update required. ${APP} is already at v${RELEASE}"
fi
msg_info "Updating $APP"
systemctl stop threadfin.service
curl -fsSL "https://github.com/Threadfin/Threadfin/releases/latest/download/Threadfin_linux_amd64" -o "/opt/threadfin/threadfin"
chmod +x /opt/threadfin/threadfin
systemctl start threadfin.service
msg_ok "Updated $APP"
exit
}

View File

@ -5,7 +5,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/TriliumNext/Trilium
APP="Trilium"
APP="Trilium Notes"
var_tags="${var_tags:-notes}"
var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-512}"

View File

@ -37,16 +37,12 @@
"type": "info"
},
{
"text": "Configuration: `nano /opt/iptag/iptag.conf`. iptag Service must be restarted after change. See here for full documentation: `https://github.com/community-scripts/ProxmoxVE/discussions/5790`",
"text": "Configuration: `nano /opt/iptag/iptag.conf`. iptag.service must be restarted after change.",
"type": "info"
},
{
"text": "The Proxmox Node must contain ipcalc and net-tools. `apt-get install -y ipcalc net-tools`",
"type": "warning"
},
{
"text": "You can execute the ip tool manually with `iptag-run`",
"type": "info"
}
]
}

View File

@ -31,10 +31,5 @@
"username": null,
"password": null
},
"notes": [
{
"text": "WARNING: Installation sources scripts outside of Community Scripts repo. Please check the source before installing.",
"type": "warning"
}
]
"notes": []
}

View File

@ -6,7 +6,7 @@
],
"date_created": "2024-05-02",
"type": "ct",
"updateable": true,
"updateable": false,
"privileged": false,
"interface_port": 5001,
"documentation": null,

View File

@ -31,10 +31,5 @@
"username": null,
"password": null
},
"notes": [
{
"text": "Use `cat ~/docmost.creds` to see database credentials.",
"type": "info"
}
]
"notes": []
}

View File

@ -6,7 +6,7 @@
],
"date_created": "2024-05-02",
"type": "ct",
"updateable": true,
"updateable": false,
"privileged": false,
"interface_port": 18083,
"documentation": "https://docs.emqx.com/en/emqx/latest/",

View File

@ -19,7 +19,7 @@
"type": "default",
"script": "ct/ersatztv.sh",
"resources": {
"cpu": 2,
"cpu": 1,
"ram": 1024,
"hdd": 5,
"os": "debian",

View File

@ -9,9 +9,9 @@
"updateable": true,
"privileged": false,
"interface_port": 4321,
"documentation": "https://github.com/RayLabsHQ/gitea-mirror/",
"documentation": "https://github.com/arunavo4/gitea-mirror/",
"config_path": "/etc/systemd/system/gitea-mirror.service",
"website": "https://github.com/RayLabsHQ/gitea-mirror/",
"website": "https://github.com/arunavo4/gitea-mirror/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/gitea-mirror.webp",
"description": "Gitea Mirror auto-syncs GitHub repos to your self-hosted Gitea, with a sleek Web UI and easy Docker deployment. ",
"install_methods": [

View File

@ -8,7 +8,7 @@
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 3000,
"interface_port": 8080,
"documentation": "https://github.com/HabitRPG/habitica/wiki",
"website": "https://habitica.com/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/habitica.webp",

View File

@ -21,7 +21,7 @@
"resources": {
"cpu": 2,
"ram": 4096,
"hdd": 10,
"hdd": 14,
"os": "debian",
"version": "12"
}

View File

@ -35,10 +35,6 @@
{
"text": "Set a root password if using autologin. This will be the PBS password. `passwd root`",
"type": "warning"
},
{
"text": "Advanced Install is only possible without root password and root SSH access, you can configure this after installation.",
"type": "warning"
}
]
}

View File

@ -1,35 +1,35 @@
{
"name": "Stirling-PDF",
"slug": "stirling-pdf",
"categories": [
12
],
"date_created": "2024-05-02",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 8080,
"documentation": null,
"website": "https://github.com/Stirling-Tools/Stirling-PDF",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/stirling-pdf.webp",
"config_path": "/opt/Stirling-PDF/.env",
"description": "Stirling-PDF is a powerful locally hosted web based PDF manipulation tool that allows you to perform various operations on PDF files, such as splitting merging, converting, reorganizing, adding images, rotating, compressing, and more.",
"install_methods": [
{
"type": "default",
"script": "ct/stirling-pdf.sh",
"resources": {
"cpu": 2,
"ram": 2048,
"hdd": 8,
"os": "debian",
"version": "12"
}
}
],
"default_credentials": {
"username": "admin",
"password": "stirling"
},
"notes": []
"name": "Stirling-PDF",
"slug": "stirling-pdf",
"categories": [
12
],
"date_created": "2024-05-02",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 8080,
"documentation": null,
"website": "https://github.com/Stirling-Tools/Stirling-PDF",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/stirling-pdf.webp",
"config_path": "/opt/Stirling-PDF/.env",
"description": "Stirling-PDF is a powerful locally hosted web based PDF manipulation tool that allows you to perform various operations on PDF files, such as splitting merging, converting, reorganizing, adding images, rotating, compressing, and more.",
"install_methods": [
{
"type": "default",
"script": "ct/stirling-pdf.sh",
"resources": {
"cpu": 2,
"ram": 2048,
"hdd": 8,
"os": "debian",
"version": "12"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": []
}

View File

@ -6,7 +6,7 @@
],
"date_created": "2024-06-12",
"type": "ct",
"updateable": true,
"updateable": false,
"privileged": false,
"interface_port": 34400,
"documentation": null,

File diff suppressed because it is too large

View File

@ -14,12 +14,11 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y \
apt-transport-https \
alsa-utils \
libxext-dev \
fontconfig \
libva-drm2
$STD apt-get install -y apt-transport-https
$STD apt-get install -y alsa-utils
$STD apt-get install -y libxext-dev
$STD apt-get install -y fontconfig
$STD apt-get install -y libva-drm2
msg_ok "Installed Dependencies"
msg_info "Installing AgentDVR"
@ -28,6 +27,7 @@ RELEASE=$(curl -fsSL "https://www.ispyconnect.com/api/Agent/DownloadLocation4?pl
cd /opt/agentdvr/agent
curl -fsSL "$RELEASE" -o $(basename "$RELEASE")
$STD unzip Agent_Linux64*.zip
rm -rf Agent_Linux64*.zip
chmod +x ./Agent
msg_ok "Installed AgentDVR"
@ -54,7 +54,6 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -rf Agent_Linux64*.zip
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -67,14 +67,5 @@ if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
msg_ok "Installed Docker Compose $DOCKER_COMPOSE_LATEST_VERSION"
fi
read -r -p "${TAB3}Would you like to expose the Docker TCP socket? <y/N> " prompt
if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
msg_info "Exposing Docker TCP socket"
$STD mkdir -p /etc/docker
$STD echo '{ "hosts": ["unix:///var/run/docker.sock", "tcp://0.0.0.0:2375"] }' > /etc/docker/daemon.json
$STD rc-service docker restart
msg_ok "Exposed Docker TCP socket at tcp://+:2375"
fi
motd_ssh
customize

View File

@ -24,13 +24,13 @@ RELEASE=$(curl -s https://api.github.com/repos/steveiliop56/tinyauth/releases/la
curl -fsSL "https://github.com/steveiliop56/tinyauth/releases/download/v${RELEASE}/tinyauth-amd64" -o /opt/tinyauth/tinyauth
chmod +x /opt/tinyauth/tinyauth
PASS=$(openssl rand -base64 8 | tr -dc 'a-zA-Z0-9' | head -c 8)
USER=$(htpasswd -Bbn "tinyauth" "${PASS}")
PASSWORD=$(openssl rand -base64 8 | tr -dc 'a-zA-Z0-9' | head -c 8)
USER=$(htpasswd -Bbn "tinyauth" "${PASSWORD}")
cat <<EOF >/opt/tinyauth/credentials.txt
cat <<EOF > /opt/tinyauth/credentials.txt
Tinyauth Credentials
Username: tinyauth
Password: ${PASS}
Password: ${PASSWORD}
EOF
echo "${RELEASE}" >/opt/tinyauth_version.txt

View File

@ -13,9 +13,13 @@ setting_up_container
network_check
update_os
fetch_and_deploy_gh_release "authelia" "authelia/authelia" "binary"
msg_info "Installing Authelia"
RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
curl -fsSL "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_amd64.deb" -o "authelia_${RELEASE}_amd64.deb"
$STD dpkg -i "authelia_${RELEASE}_amd64.deb"
msg_ok "Install Authelia completed"
read -rp "${TAB3}Enter your domain (ex. example.com): " DOMAIN
read -p "${TAB3}Enter your domain (ex. example.com): " DOMAIN
msg_info "Setting Authelia up"
touch /etc/authelia/emails.txt
@ -68,6 +72,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f "authelia_${RELEASE}_amd64.deb"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -24,10 +24,13 @@ $STD apt-get install -y \
msg_ok "Installed Dependencies"
setup_uv
fetch_and_deploy_gh_release "babybuddy" "babybuddy/babybuddy"
msg_info "Installing Babybuddy"
mkdir -p /opt/data
RELEASE=$(curl -fsSL https://api.github.com/repos/babybuddy/babybuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
temp_file=$(mktemp)
mkdir -p /opt/{babybuddy,data}
curl -fsSL "https://github.com/babybuddy/babybuddy/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf "$temp_file" --strip-components=1 -C /opt/babybuddy
cd /opt/babybuddy
$STD uv venv .venv
$STD source .venv/bin/activate
@ -99,6 +102,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f "$temp_file"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -13,9 +13,14 @@ setting_up_container
network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y \
apache2 \
libapache2-mod-php \
php-{pgsql,dom}
msg_ok "Installed Dependencies"
PG_VERSION="16" setup_postgresql
PHP_APACHE="YES" PHP_MODULE="pgsql" PHP_VERSION="8.2" setup_php
fetch_and_deploy_gh_release "baikal" "sabre-io/Baikal"
msg_info "Setting up PostgreSQL Database"
DB_NAME=baikal
@ -31,7 +36,11 @@ $STD sudo -u postgres psql -c "CREATE DATABASE $DB_NAME WITH OWNER $DB_USER TEMP
} >>~/baikal.creds
msg_ok "Set up PostgreSQL Database"
msg_info "Configuring Baikal"
msg_info "Installing Baikal"
RELEASE=$(curl -fsSL https://api.github.com/repos/sabre-io/Baikal/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
cd /opt
curl -fsSL "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip" -o "baikal-${RELEASE}.zip"
$STD unzip "baikal-${RELEASE}.zip"
cat <<EOF >/opt/baikal/config/baikal.yaml
database:
backend: pgsql
@ -42,6 +51,7 @@ database:
EOF
chown -R www-data:www-data /opt/baikal/
chmod -R 755 /opt/baikal/
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Installed Baikal"
msg_info "Creating Service"
@ -80,6 +90,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -rf "/opt/baikal-${RELEASE}.zip"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -14,15 +14,22 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y redis
$STD apt-get install -y \
apache2 \
redis \
php-{curl,date,json,mbstring,redis,sqlite3,sockets} \
libapache2-mod-php
msg_ok "Installed Dependencies"
PHP_VERSION="8.2" PHP_APACHE="YES" PHP_MODULE="redis, sqlite3" setup_php
fetch_and_deploy_gh_release "barcodebuddy" "Forceu/barcodebuddy"
msg_info "Configuring barcodebuddy"
msg_info "Installing barcodebuddy"
RELEASE=$(curl -fsSL https://api.github.com/repos/Forceu/barcodebuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
cd /opt
curl -fsSL "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip" -o "v${RELEASE}.zip"
$STD unzip "v${RELEASE}.zip"
mv "/opt/barcodebuddy-${RELEASE}" /opt/barcodebuddy
chown -R www-data:www-data /opt/barcodebuddy/data
msg_ok "Configured barcodebuddy"
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Installed barcodebuddy"
msg_info "Creating Services"
cat <<EOF >/etc/systemd/system/barcodebuddy.service
@ -66,6 +73,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -rf "/opt/v${RELEASE}.zip"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -22,10 +22,17 @@ msg_ok "Installed Dependencies"
PG_VERSION="16" setup_postgresql
setup_go
fetch_and_deploy_gh_release "bitmagnet" "bitmagnet-io/bitmagnet"
RELEASE=$(curl -fsSL https://api.github.com/repos/bitmagnet-io/bitmagnet/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
msg_info "Setting up database"
msg_info "Installing bitmagnet v${RELEASE}"
mkdir -p /opt/bitmagnet
temp_file=$(mktemp)
curl -fsSL "https://github.com/bitmagnet-io/bitmagnet/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf "$temp_file" --strip-components=1 -C /opt/bitmagnet
cd /opt/bitmagnet
VREL=v$RELEASE
$STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
chmod +x bitmagnet
POSTGRES_PASSWORD=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
$STD sudo -u postgres psql -c "ALTER USER postgres WITH PASSWORD '$POSTGRES_PASSWORD';"
$STD sudo -u postgres psql -c "CREATE DATABASE bitmagnet;"
@ -34,14 +41,8 @@ $STD sudo -u postgres psql -c "CREATE DATABASE bitmagnet;"
echo ""
echo "postgres user password: $POSTGRES_PASSWORD"
} >>~/postgres.creds
msg_ok "Database set up"
msg_info "Configuring bitmagnet v${RELEASE}"
cd /opt/bitmagnet
VREL=v$RELEASE
$STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
chmod +x bitmagnet
msg_ok "Configured bitmagnet v${RELEASE}"
echo "${RELEASE}" >/opt/bitmagnet_version.txt
msg_ok "Installed bitmagnet v${RELEASE}"
read -r -p "${TAB3}Enter your TMDB API key if you have one: " tmdbapikey
@ -71,6 +72,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f "$temp_file"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -16,12 +16,12 @@ update_os
msg_info "Installing Dependencies (Patience)"
$STD apt-get install -y \
apache2 \
php8.2-{mbstring,gd,fpm,curl,intl,ldap,tidy,bz2,mysql,zip,xml} \
composer \
libapache2-mod-php \
make
msg_ok "Installed Dependencies"
PHP_MODULE="ldap,tidy,bz2,mysqli" PHP_FPM="YES" PHP_APACHE="YES" PHP_VERSION="8.3" setup_php
setup_composer
setup_mariadb
msg_info "Setting up Database"
@ -39,10 +39,13 @@ $STD mariadb -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUS
} >>~/bookstack.creds
msg_ok "Set up database"
fetch_and_deploy_gh_release "bookstack" "BookStackApp/BookStack"
msg_info "Setup Bookstack (Patience)"
LOCAL_IP="$(hostname -I | awk '{print $1}')"
msg_info "Configuring Bookstack (Patience)"
cd /opt
RELEASE=$(curl -fsSL https://api.github.com/repos/BookStackApp/BookStack/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/BookStackApp/BookStack/archive/refs/tags/v${RELEASE}.zip" -o "v${RELEASE}.zip"
$STD unzip v${RELEASE}.zip
mv BookStack-${RELEASE} /opt/bookstack
cd /opt/bookstack
cp .env.example .env
sudo sed -i "s|APP_URL=.*|APP_URL=http://$LOCAL_IP|g" /opt/bookstack/.env
@ -57,8 +60,9 @@ chmod -R 755 /opt/bookstack /opt/bookstack/bootstrap/cache /opt/bookstack/public
chmod -R 775 /opt/bookstack/storage /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads
chmod -R 640 /opt/bookstack/.env
$STD a2enmod rewrite
$STD a2enmod php8.3
msg_ok "Configured Bookstack"
$STD a2enmod php8.2
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Installed Bookstack"
msg_info "Creating Service"
cat <<EOF >/etc/apache2/sites-available/bookstack.conf
@ -107,6 +111,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -rf /opt/v${RELEASE}.zip
$STD apt-get autoremove
$STD apt-get autoclean
msg_ok "Cleaned"

View File

@ -18,12 +18,19 @@ $STD apt-get install -y apt-transport-https
$STD apt-get install -y lsb-release
msg_ok "Installed Dependencies"
msg_info "Installing Nginx"
curl -fsSL "https://nginx.org/keys/nginx_signing.key" | gpg --dearmor >/usr/share/keyrings/nginx-archive-keyring.gpg
echo "deb [signed-by=/usr/share/keyrings/nginx-archive-keyring.gpg] http://nginx.org/packages/debian $(lsb_release -cs) nginx" >/etc/apt/sources.list.d/nginx.list
$STD apt-get update
$STD apt-get install -y nginx=1.26.3*
msg_ok "Installed Nginx"
RELEASE=$(curl -fsSL https://api.github.com/repos/bunkerity/bunkerweb/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
msg_info "Installing BunkerWeb v${RELEASE} (Patience)"
curl -fsSL -o install-bunkerweb.sh https://github.com/bunkerity/bunkerweb/raw/v${RELEASE}/misc/install-bunkerweb.sh
chmod +x install-bunkerweb.sh
$STD ./install-bunkerweb.sh --yes
$STD apt-mark unhold bunkerweb nginx
curl -fsSL "https://repo.bunkerweb.io/bunkerity/bunkerweb/gpgkey" | gpg --dearmor >/etc/apt/keyrings/bunkerity_bunkerweb-archive-keyring.gpg
echo "deb [signed-by=/etc/apt/keyrings/bunkerity_bunkerweb-archive-keyring.gpg] https://repo.bunkerweb.io/bunkerity/bunkerweb/debian/ bookworm main" >/etc/apt/sources.list.d/bunkerity_bunkerweb.list
$STD apt-get update
$STD apt-get install -y bunkerweb=${RELEASE}
cat <<EOF >/etc/apt/preferences.d/bunkerweb
Package: bunkerweb
Pin: version ${RELEASE}

View File

@ -14,17 +14,22 @@ network_check
update_os
NODE_VERSION="22" setup_nodejs
fetch_and_deploy_gh_release "bytestash" "jordan-dalby/ByteStash"
msg_info "Installing ByteStash"
JWT_SECRET=$(openssl rand -base64 32 | tr -d '/+=')
temp_file=$(mktemp)
RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/jordan-dalby/ByteStash/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf $temp_file
mv ByteStash-${RELEASE} /opt/bytestash
cd /opt/bytestash/server
$STD npm install
cd /opt/bytestash/client
$STD npm install
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Installed ByteStash"
read -rp "${TAB3}Do you want to allow registration of multiple accounts? [y/n]: " allowreg
read -p "${TAB3}Do you want to allow registration of multiple accounts? [y/n]: " allowreg
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/bytestash-backend.service
@ -68,6 +73,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f $temp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -17,8 +17,22 @@ msg_info "Installing Dependencies"
$STD apt-get install -y rsync
msg_ok "Installed Dependencies"
JAVA_VERSION="17" setup_java
fetch_and_deploy_gh_release "commafeed" "Athou/commafeed" "prebuild" "latest" "/opt/commafeed" "commafeed-*-h2-jvm.zip"
msg_info "Installing Azul Zulu"
curl -fsSL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xB1998361219BD9C9" -o "/etc/apt/trusted.gpg.d/zulu-repo.asc"
curl -fsSL "https://cdn.azul.com/zulu/bin/zulu-repo_1.0.0-3_all.deb" -o "zulu-repo_1.0.0-3_all.deb"
$STD dpkg -i zulu-repo_1.0.0-3_all.deb
$STD apt-get update
$STD apt-get -y install zulu17-jdk
msg_ok "Installed Azul Zulu"
RELEASE=$(curl -fsSL https://api.github.com/repos/Athou/commafeed/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
msg_info "Installing CommaFeed ${RELEASE}"
mkdir /opt/commafeed
curl -fsSL "https://github.com/Athou/commafeed/releases/download/${RELEASE}/commafeed-${RELEASE}-h2-jvm.zip" -o "commafeed-${RELEASE}-h2-jvm.zip"
$STD unzip commafeed-${RELEASE}-h2-jvm.zip
mv commafeed-${RELEASE}-h2/* /opt/commafeed/
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Installed CommaFeed ${RELEASE}"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/commafeed.service
@ -41,6 +55,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -rf commafeed-${RELEASE}-h2 commafeed-${RELEASE}-h2-jvm.zip zulu-repo_1.0.0-3_all.deb
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -14,15 +14,20 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y git
$STD apt-get install -y \
git
msg_ok "Installed Dependencies"
NODE_VERSION="22" setup_nodejs
read -rp "${TAB3}Install OnlyOffice components instead of CKEditor? (Y/N): " onlyoffice
fetch_and_deploy_gh_release "cryptpad" "cryptpad/cryptpad"
read -p "${TAB3}Install OnlyOffice components instead of CKEditor? (Y/N): " onlyoffice
msg_info "Setup ${APPLICATION}"
temp_file=$(mktemp)
RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
curl -fsSL "https://github.com/cryptpad/cryptpad/archive/refs/tags/${RELEASE}.tar.gz" -o "$temp_file"
tar zxf $temp_file
mv cryptpad-$RELEASE /opt/cryptpad
cd /opt/cryptpad
$STD npm ci
$STD npm run install:components
@ -34,6 +39,7 @@ sed -i "80s#//httpAddress: 'localhost'#httpAddress: '0.0.0.0'#g" /opt/cryptpad/c
if [[ "$onlyoffice" =~ ^[Yy]$ ]]; then
$STD bash -c "./install-onlyoffice.sh --accept-license"
fi
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Setup ${APPLICATION}"
msg_info "Creating Service"
@ -63,6 +69,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f $temp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -14,12 +14,15 @@ network_check
update_os
NODE_VERSION="22" setup_nodejs
fetch_and_deploy_gh_release "dashy" "Lissy93/dashy"
RELEASE=$(curl -fsSL https://api.github.com/repos/Lissy93/dashy/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
msg_info "Installing Dashy ${RELEASE} (Patience)"
mkdir -p /opt/dashy
curl -fsSL "https://github.com/Lissy93/dashy/archive/refs/tags/${RELEASE}.tar.gz" | tar -xz -C /opt/dashy --strip-components=1
cd /opt/dashy
$STD npm install
$STD npm run build
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Installed Dashy ${RELEASE}"
msg_info "Creating Service"

View File

@ -29,17 +29,7 @@ echo -e '{\n "log-driver": "journald"\n}' >/etc/docker/daemon.json
$STD sh <(curl -fsSL https://get.docker.com)
msg_ok "Installed Docker $DOCKER_LATEST_VERSION"
read -r -p "${TAB3}Install Docker Compose v2 plugin? <y/N> " prompt_compose
if [[ ${prompt_compose,,} =~ ^(y|yes)$ ]]; then
msg_info "Installing Docker Compose $DOCKER_COMPOSE_LATEST_VERSION"
mkdir -p /usr/local/lib/docker/cli-plugins
curl -fsSL "https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_LATEST_VERSION}/docker-compose-$(uname -s)-$(uname -m)" \
-o /usr/local/lib/docker/cli-plugins/docker-compose
chmod +x /usr/local/lib/docker/cli-plugins/docker-compose
msg_ok "Installed Docker Compose $DOCKER_COMPOSE_LATEST_VERSION"
fi
read -r -p "${TAB3}Would you like to add Portainer (UI)? <y/N> " prompt
read -r -p "${TAB3}Would you like to add Portainer? <y/N> " prompt
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
msg_info "Installing Portainer $PORTAINER_LATEST_VERSION"
docker volume create portainer_data >/dev/null
@ -53,9 +43,9 @@ if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
portainer/portainer-ce:latest
msg_ok "Installed Portainer $PORTAINER_LATEST_VERSION"
else
read -r -p "${TAB3}Would you like to install the Portainer Agent (for remote management)? <y/N> " prompt_agent
if [[ ${prompt_agent,,} =~ ^(y|yes)$ ]]; then
msg_info "Installing Portainer Agent $PORTAINER_AGENT_LATEST_VERSION"
read -r -p "${TAB3}Would you like to add the Portainer Agent? <y/N> " prompt
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
msg_info "Installing Portainer agent $PORTAINER_AGENT_LATEST_VERSION"
$STD docker run -d \
-p 9001:9001 \
--name portainer_agent \
@ -67,44 +57,6 @@ else
fi
fi
read -r -p "${TAB3}Expose Docker TCP socket (insecure) ? [n = No, l = Local only (127.0.0.1), a = All interfaces (0.0.0.0)] <n/l/a>: " socket_choice
case "${socket_choice,,}" in
l)
socket="tcp://127.0.0.1:2375"
;;
a)
socket="tcp://0.0.0.0:2375"
;;
*)
socket=""
;;
esac
if [[ -n "$socket" ]]; then
msg_info "Enabling Docker TCP socket on $socket"
$STD apt-get install -y jq
tmpfile=$(mktemp)
jq --arg sock "$socket" '. + { "hosts": ["unix:///var/run/docker.sock", $sock] }' /etc/docker/daemon.json > "$tmpfile" && mv "$tmpfile" /etc/docker/daemon.json
mkdir -p /etc/systemd/system/docker.service.d
cat <<EOF > /etc/systemd/system/docker.service.d/override.conf
[Service]
ExecStart=
ExecStart=/usr/bin/dockerd
EOF
$STD systemctl daemon-reexec
$STD systemctl daemon-reload
if systemctl restart docker; then
msg_ok "Docker TCP socket available on $socket"
else
msg_error "Docker failed to restart. Check journalctl -xeu docker.service"
exit 1
fi
fi
motd_ssh
customize

View File

@ -22,7 +22,6 @@ msg_ok "Installed Dependencies"
NODE_VERSION="22" NODE_MODULE="pnpm@$(curl -s https://raw.githubusercontent.com/docmost/docmost/main/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
PG_VERSION="16" setup_postgresql
fetch_and_deploy_gh_release "docmost" "docmost/docmost"
msg_info "Setting up PostgreSQL"
DB_NAME="docmost_db"
@ -41,7 +40,12 @@ $STD sudo -u postgres psql -c "ALTER ROLE $DB_USER SET timezone TO 'UTC'"
} >>~/docmost.creds
msg_ok "Set up PostgreSQL"
msg_info "Configuring Docmost (Patience)"
msg_info "Installing Docmost (Patience)"
temp_file=$(mktemp)
RELEASE=$(curl -fsSL https://api.github.com/repos/docmost/docmost/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/docmost/docmost/archive/refs/tags/v${RELEASE}.tar.gz" -o ""$temp_file""
tar -xzf "$temp_file"
mv docmost-${RELEASE} /opt/docmost
cd /opt/docmost
mv .env.example .env
mkdir data
@ -52,7 +56,8 @@ sed -i -e "s|APP_SECRET=.*|APP_SECRET=$(openssl rand -base64 32 | tr -dc 'a-zA-Z
export NODE_OPTIONS="--max-old-space-size=2048"
$STD pnpm install
$STD pnpm build
msg_ok "Configured Docmost"
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Installed Docmost"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/docmost.service
@ -76,6 +81,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f "$temp_file"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -24,15 +24,17 @@ if [[ "$CTTYPE" == "0" ]]; then
fi
msg_ok "Set Up Hardware Acceleration"
fetch_and_deploy_gh_release "emby" "MediaBrowser/Emby.Releases" "binary"
LATEST=$(curl -fsSL https://api.github.com/repos/MediaBrowser/Emby.Releases/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
msg_info "Configuring Emby"
msg_info "Installing Emby"
curl -fsSL "https://github.com/MediaBrowser/Emby.Releases/releases/download/${LATEST}/emby-server-deb_${LATEST}_amd64.deb" -o "emby-server-deb_${LATEST}_amd64.deb"
$STD dpkg -i emby-server-deb_${LATEST}_amd64.deb
if [[ "$CTTYPE" == "0" ]]; then
sed -i -e 's/^ssl-cert:x:104:$/render:x:104:root,emby/' -e 's/^render:x:108:root,emby$/ssl-cert:x:108:/' /etc/group
else
sed -i -e 's/^ssl-cert:x:104:$/render:x:104:emby/' -e 's/^render:x:108:emby$/ssl-cert:x:108:/' /etc/group
fi
msg_ok "Configured Emby"
msg_ok "Installed Emby"
motd_ssh
customize
@ -40,4 +42,5 @@ customize
msg_info "Cleaning up"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
rm emby-server-deb_${LATEST}_amd64.deb
msg_ok "Cleaned"

View File

@ -13,39 +13,16 @@ setting_up_container
network_check
update_os
msg_info "Installing dependencies"
$STD apt-get install -y ca-certificates
msg_ok "Installed dependencies"
msg_info "Fetching latest EMQX Enterprise version"
LATEST_VERSION=$(curl -fsSL https://www.emqx.com/en/downloads/enterprise | grep -oP '/en/downloads/enterprise/v\K[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -n1)
if [[ -z "$LATEST_VERSION" ]]; then
msg_error "Failed to determine latest EMQX version"
exit 1
fi
msg_ok "Latest version: v$LATEST_VERSION"
DOWNLOAD_URL="https://www.emqx.com/en/downloads/enterprise/v$LATEST_VERSION/emqx-enterprise-${LATEST_VERSION}-debian12-amd64.deb"
DEB_FILE="/tmp/emqx-enterprise-${LATEST_VERSION}-debian12-amd64.deb"
msg_info "Downloading EMQX v$LATEST_VERSION"
$STD curl -fsSL -o "$DEB_FILE" "$DOWNLOAD_URL"
msg_ok "Downloaded EMQX"
msg_info "Installing EMQX"
$STD apt-get install -y "$DEB_FILE"
echo "$LATEST_VERSION" >~/.emqx
$STD bash <(curl -fsSL https://packagecloud.io/install/repositories/emqx/emqx/script.deb.sh)
$STD apt-get install -y emqx
$STD systemctl enable --now emqx
msg_ok "Installed EMQX"
msg_info "Starting EMQX service"
$STD systemctl enable -q --now emqx
msg_ok "Enabled EMQX service"
motd_ssh
customize
msg_info "Cleaning up"
rm -f "$DEB_FILE"
$STD apt-get autoremove
$STD apt-get autoclean
apt-get autoremove >/dev/null
apt-get autoclean >/dev/null
msg_ok "Cleaned"

View File

@ -13,7 +13,15 @@ setting_up_container
network_check
update_os
FFMPEG_VERSION="latest" FFMPEG_TYPE="medium" setup_ffmpeg
msg_info "Installing FFmpeg (Patience)"
cd /usr/local/bin
curl -fsSL "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz" -o "ffmpeg-release-amd64-static.tar.xz"
$STD tar -xvf ffmpeg-release-amd64-static.tar.xz
rm -f ffmpeg-*.tar.xz
cd ffmpeg-*
mv ffmpeg ffprobe /usr/local/bin/
rm -rf /usr/local/bin/ffmpeg-*
msg_ok "Installed FFmpeg"
msg_info "Setting Up Hardware Acceleration"
$STD apt-get -y install {va-driver-all,ocl-icd-libopencl1,intel-opencl-icd,vainfo,intel-gpu-tools}
@ -26,7 +34,15 @@ if [[ "$CTTYPE" == "0" ]]; then
fi
msg_ok "Set Up Hardware Acceleration"
fetch_and_deploy_gh_release "ersatztv" "ErsatzTV/ErsatzTV" "prebuild" "latest" "/opt/ErsatzTV" "*linux-x64.tar.gz"
msg_info "Installing ErsatzTV"
temp_file=$(mktemp)
cd /opt
RELEASE=$(curl -fsSL https://api.github.com/repos/ErsatzTV/ErsatzTV/releases | grep -oP '"tag_name": "\K[^"]+' | head -n 1)
curl -fsSL "https://github.com/ErsatzTV/ErsatzTV/releases/download/${RELEASE}/ErsatzTV-${RELEASE}-linux-x64.tar.gz" -o "$temp_file"
tar -xzf "$temp_file"
mv /opt/ErsatzTV-${RELEASE}-linux-x64 /opt/ErsatzTV
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Installed ErsatzTV"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/ersatzTV.service
@ -37,8 +53,8 @@ After=multi-user.target
[Service]
Type=simple
User=root
WorkingDirectory=/opt/ErsatzTV
ExecStart=/opt/ErsatzTV/ErsatzTV
WorkingDirectory=/opt/ErsatzTV
ExecStart=/opt/ErsatzTV/ErsatzTV
Restart=always
RestartSec=30
@ -52,6 +68,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f ${temp_file}
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -14,15 +14,21 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y xdg-utils
$STD apt-get install -y \
xdg-utils
msg_ok "Installed Dependencies"
NODE_VERSION="22" NODE_MODULE="yarn@latest" setup_nodejs
fetch_and_deploy_gh_release "excalidraw" "excalidraw/excalidraw"
msg_info "Configuring Excalidraw"
msg_info "Setup Excalidraw"
temp_file=$(mktemp)
RELEASE=$(curl -fsSL https://api.github.com/repos/excalidraw/excalidraw/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/excalidraw/excalidraw/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar xzf $temp_file
mv excalidraw-${RELEASE} /opt/excalidraw
cd /opt/excalidraw
$STD yarn
echo "${RELEASE}" >/opt/excalidraw_version.txt
msg_ok "Setup Excalidraw"
msg_info "Creating Service"
@ -47,6 +53,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f $temp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -14,14 +14,17 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y apache2
curl -fsSLo /usr/share/keyrings/deb.sury.org-php.gpg https://packages.sury.org/php/apt.gpg
echo "deb [signed-by=/usr/share/keyrings/deb.sury.org-php.gpg] https://packages.sury.org/php/ bookworm main" >/etc/apt/sources.list.d/php.list
$STD apt-get update
$STD apt-get install -y \
apache2 \
libapache2-mod-php8.4 \
php8.4-{bcmath,cli,intl,curl,zip,gd,xml,mbstring,mysql} \
composer
msg_ok "Installed Dependencies"
PHP_VERSION="8.4" PHP_APACHE="YES" PHP_MODULE="mysql" setup_php
setup_composer
setup_mariadb
fetch_and_deploy_gh_release "firefly" "firefly-iii/firefly-iii"
LOCAL_IP=$(hostname -I | awk '{print $1}')
msg_info "Setting up database"
DB_NAME=firefly
@ -38,7 +41,13 @@ mariadb -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUSH PRI
} >>~/firefly.creds
msg_ok "Set up database"
msg_info "Configuring Firefly III (Patience)"
msg_info "Installing Firefly III (Patience)"
LOCAL_IP=$(hostname -I | awk '{print $1}')
RELEASE=$(curl -fsSL https://api.github.com/repos/firefly-iii/firefly-iii/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4)}')
cd /opt
curl -fsSL "https://github.com/firefly-iii/firefly-iii/releases/download/v${RELEASE}/FireflyIII-v${RELEASE}.tar.gz" -o "FireflyIII-v${RELEASE}.tar.gz"
mkdir -p /opt/firefly
tar -xzf FireflyIII-v${RELEASE}.tar.gz -C /opt/firefly
chown -R www-data:www-data /opt/firefly
chmod -R 775 /opt/firefly/storage
cd /opt/firefly
@ -60,7 +69,8 @@ tar -xzf "DataImporter-v${IMPORTER_RELEASE}.tar.gz" -C /opt/firefly/dataimporter
cp /opt/firefly/dataimporter/.env.example /opt/firefly/dataimporter/.env
sed -i "s#FIREFLY_III_URL=#FIREFLY_III_URL=http://${LOCAL_IP}#g" /opt/firefly/dataimporter/.env
chown -R www-data:www-data /opt/firefly
msg_ok "Configured Firefly III"
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Installed Firefly III"
msg_info "Creating Service"
cat <<EOF >/etc/apache2/sites-available/firefly.conf
@ -102,6 +112,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -rf "/opt/FireflyIII-v${RELEASE}.tar.gz"
rm -rf "/opt/DataImporter-v${IMPORTER_RELEASE}.tar.gz"
$STD apt-get -y autoremove
$STD apt-get -y autoclean

View File

@ -15,9 +15,8 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y \
apt-transport-https \
xvfb
$STD apt-get install -y apt-transport-https
$STD apt-get install -y xvfb
msg_ok "Installed Dependencies"
msg_info "Installing Chrome"
@ -27,7 +26,13 @@ $STD apt update
$STD apt install -y google-chrome-stable
msg_ok "Installed Chrome"
fetch_and_deploy_gh_release "flaresolverr" "FlareSolverr/FlareSolverr" "prebuild" "latest" "/opt/flaresolverr" "flaresolverr_linux_x64.tar.gz"
msg_info "Installing FlareSolverr"
RELEASE=$(curl -fsSL https://github.com/FlareSolverr/FlareSolverr/releases/latest | grep "title>Release" | cut -d " " -f 4)
$STD curl -fsSL "https://github.com/FlareSolverr/FlareSolverr/releases/download/$RELEASE/flaresolverr_linux_x64.tar.gz" -o "flaresolverr_linux_x64.tar.gz"
$STD tar -xzf flaresolverr_linux_x64.tar.gz -C /opt
$STD rm flaresolverr_linux_x64.tar.gz
echo "${RELEASE}" >/opt/"${APPLICATION}"_version.txt
msg_ok "Installed FlareSolverr"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/flaresolverr.service

View File

@ -14,11 +14,17 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y zip
$STD apt-get install -y \
zip \
postgresql-common
msg_ok "Installed Dependencies"
PG_VERSION="17" setup_postgresql
NODE_VERSION="20" setup_nodejs
msg_info "Installing Additional Dependencies"
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" >/etc/apt/sources.list.d/nodesource.list
echo "YES" | /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh &>/dev/null
$STD apt-get install -y postgresql-17 nodejs
msg_ok "Installed Additional Dependencies"
msg_info "Setting up Postgresql Database"
DB_NAME="fluiddb"
@ -38,9 +44,14 @@ $STD sudo -u postgres psql -c "ALTER USER $DB_USER WITH SUPERUSER;"
} >>~/$APPLICATION.creds
msg_ok "Set up Postgresql Database"
fetch_and_deploy_gh_release "fluid-calendar" "dotnetfactory/fluid-calendar"
msg_info "Setup ${APPLICATION}"
tmp_file=$(mktemp)
RELEASE=$(curl -fsSL https://api.github.com/repos/dotnetfactory/fluid-calendar/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/dotnetfactory/fluid-calendar/archive/refs/tags/v${RELEASE}.zip" -o "$tmp_file"
$STD unzip $tmp_file
mv ${APPLICATION}-${RELEASE}/ /opt/${APPLICATION}
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_info "Configuring ${APPLICATION}"
cat <<EOF >/opt/fluid-calendar/.env
DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@localhost:5432/${DB_NAME}"
@ -61,7 +72,7 @@ $STD npm install --legacy-peer-deps
$STD npm run prisma:generate
$STD npx prisma migrate deploy
$STD npm run build:os
msg_ok "Configuring ${APPLICATION}"
msg_ok "Setup ${APPLICATION}"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/fluid-calendar.service
@ -84,6 +95,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f $tmp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -13,7 +13,13 @@ setting_up_container
network_check
update_os
PHP_VERSION="8.2" PHP_MODULE="curl,xml,mbstring,intl,zip,pgsql,gmp" PHP_APACHE="YES" setup_php
msg_info "Installing Dependencies"
$STD apt-get install -y \
apache2 \
php-{curl,dom,json,ctype,pgsql,gmp,mbstring,iconv,zip} \
libapache2-mod-php
msg_ok "Installed Dependencies"
PG_VERSION="16" setup_postgresql
msg_info "Setting up PostgreSQL"
@ -30,14 +36,17 @@ $STD sudo -u postgres psql -c "CREATE DATABASE $DB_NAME WITH OWNER $DB_USER TEMP
} >>~/freshrss.creds
msg_ok "Set up PostgreSQL"
fetch_and_deploy_gh_release "freshrss" "FreshRSS/FreshRSS"
msg_info "Configuring FreshRSS"
msg_info "Installing FreshRSS"
RELEASE=$(curl -fsSL https://api.github.com/repos/FreshRSS/FreshRSS/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
cd /opt
curl -fsSL "https://github.com/FreshRSS/FreshRSS/archive/refs/tags/${RELEASE}.zip" -o "${RELEASE}.zip"
$STD unzip "${RELEASE}.zip"
mv "/opt/FreshRSS-${RELEASE}" /opt/freshrss
cd /opt/freshrss
chown -R www-data:www-data /opt/freshrss
chmod -R g+rX /opt/freshrss
chmod -R g+w /opt/freshrss/data/
msg_ok "Configured FreshRSS"
msg_ok "Installed FreshRSS"
msg_info "Setting up cron job for feed refresh"
cat <<EOF >/etc/cron.d/freshrss-actualize
@ -74,6 +83,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -rf "/opt/${RELEASE}.zip"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -20,15 +20,20 @@ $STD apt-get install -y \
msg_ok "Installed Dependencies"
setup_go
fetch_and_deploy_gh_release "gatus" "TwiN/gatus"
msg_info "Configuring gatus"
RELEASE=$(curl -s https://api.github.com/repos/TwiN/gatus/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
msg_info "Setting up gatus v${RELEASE}"
temp_file=$(mktemp)
mkdir -p /opt/gatus
curl -fsSL "https://github.com/TwiN/gatus/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf "$temp_file" --strip-components=1 -C /opt/gatus
cd /opt/gatus
$STD go mod tidy
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o gatus .
setcap CAP_NET_RAW+ep gatus
mv config.yaml config
msg_ok "Configured gatus"
echo "${RELEASE}" >/opt/gatus_version.txt
msg_ok "Done setting up gatus"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/gatus.service
@ -53,6 +58,10 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f "$temp_file"
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"
motd_ssh
customize

View File

@ -16,19 +16,19 @@ update_os
msg_info "Installing Dependencies"
$STD apt-get install -y \
nginx \
ca-certificates \
libjemalloc2
ca-certificates
msg_ok "Installed Dependencies"
setup_mysql
setup_mariadb
msg_info "Configuring Database"
DB_NAME=ghost
DB_USER=ghostuser
DB_PASS=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
$STD mysql -u root -e "CREATE DATABASE $DB_NAME;"
$STD mysql -u root -e "CREATE USER '$DB_USER'@'localhost' IDENTIFIED BY '$DB_PASS';"
$STD mysql -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUSH PRIVILEGES;"
$STD mariadb -u root -e "CREATE DATABASE $DB_NAME;"
$STD mariadb -u root -e "CREATE USER '$DB_USER'@'localhost' IDENTIFIED BY '$DB_PASS';"
$STD mariadb -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUSH PRIVILEGES;"
{
echo "Ghost-Credentials"
echo "Ghost Database User: $DB_USER"
@ -37,7 +37,7 @@ $STD mysql -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUSH
} >>~/ghost.creds
msg_ok "Configured MySQL"
NODE_VERSION="22" setup_nodejs
NODE_VERSION="20" setup_nodejs
msg_info "Installing Ghost CLI"
$STD npm install ghost-cli@latest -g

View File

@ -19,9 +19,10 @@ $STD apt-get install -y git
$STD apt-get install -y sqlite3
msg_ok "Installed Dependencies"
fetch_and_deploy_gh_release "gitea" "go-gitea/gitea" "singlefile" "latest" "/usr/local/bin" "gitea-*-linux-amd64"
msg_info "Configuring Gitea"
msg_info "Installing Gitea"
RELEASE=$(curl -fsSL https://github.com/go-gitea/gitea/releases/latest | grep "title>Release" | cut -d " " -f 4 | sed 's/^v//')
curl -fsSL "https://github.com/go-gitea/gitea/releases/download/v$RELEASE/gitea-$RELEASE-linux-amd64" -o "gitea-$RELEASE-linux-amd64"
mv gitea* /usr/local/bin/gitea
chmod +x /usr/local/bin/gitea
adduser --system --group --disabled-password --shell /bin/bash --home /etc/gitea gitea >/dev/null
mkdir -p /var/lib/gitea/{custom,data,log}
@ -30,7 +31,7 @@ chmod -R 750 /var/lib/gitea/
chown root:gitea /etc/gitea
chmod 770 /etc/gitea
sudo -u gitea ln -s /var/lib/gitea/data/.ssh/ /etc/gitea/.ssh
msg_ok "Configured Gitea"
msg_ok "Installed Gitea"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/gitea.service

View File

@ -3,7 +3,7 @@
# Copyright (c) 2021-2025 community-scripts ORG
# Author: CrazyWolf13
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/RayLabsHQ/gitea-mirror
# Source: https://github.com/arunavo4/gitea-mirror
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
@ -28,7 +28,7 @@ ln -sf /opt/bun/bin/bun /usr/local/bin/bun
ln -sf /opt/bun/bin/bun /usr/local/bin/bunx
msg_ok "Installed Bun"
fetch_and_deploy_gh_release "gitea-mirror" "RayLabsHQ/gitea-mirror"
fetch_and_deploy_gh_release "gitea-mirror" "arunavo4/gitea-mirror"
msg_info "Installing gitea-mirror"
cd /opt/gitea-mirror

View File

@ -13,9 +13,12 @@ setting_up_container
network_check
update_os
fetch_and_deploy_gh_release "glance" "glanceapp/glance" "prebuild" "latest" "/opt/glance" "glance-linux-amd64.tar.gz"
msg_info "Configuring Glance"
msg_info "Installing Glance"
RELEASE=$(curl -fsSL https://api.github.com/repos/glanceapp/glance/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
cd /opt
curl -fsSL "https://github.com/glanceapp/glance/releases/download/v${RELEASE}/glance-linux-amd64.tar.gz" -o "glance-linux-amd64.tar.gz"
mkdir -p /opt/glance
tar -xzf glance-linux-amd64.tar.gz -C /opt/glance
cat <<EOF >/opt/glance/glance.yml
pages:
- name: Startpage
@ -36,7 +39,9 @@ pages:
- title: Helper Scripts
url: https://github.com/community-scripts/ProxmoxVE
EOF
msg_ok "Configured Glance"
echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
msg_ok "Installed Glance"
msg_info "Creating Service"
service_path="/etc/systemd/system/glance.service"
@ -62,6 +67,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -rf /opt/glance-linux-amd64.tar.gz
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -22,16 +22,18 @@ curl -fsSL "http://archive.ubuntu.com/ubuntu/pool/main/o/openssl/libssl1.1_1.1.1
$STD dpkg -i libssl1.1_1.1.1f-1ubuntu2_amd64.deb
msg_ok "Installed Dependencies"
NODE_VERSION="20" NODE_MODULE="gulp-cli,mocha" setup_nodejs
fetch_and_deploy_gh_release "habitica" "HabitRPG/habitica" "tarball" "latest" "/opt/habitica"
NODE_VERSION="20" setup_nodejs
msg_info "Setup ${APPLICATION}"
temp_file=$(mktemp)
RELEASE=$(curl -fsSL https://api.github.com/repos/HabitRPG/habitica/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/HabitRPG/habitica/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
tar zxf $temp_file
mv habitica-${RELEASE}/ /opt/habitica
cd /opt/habitica
$STD npm i
$STD npm run postinstall
$STD npm run client:build
$STD gulp build:prod
cp config.json.example config.json
echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
msg_ok "Setup ${APPLICATION}"
msg_info "Creating Service"
@ -89,6 +91,7 @@ motd_ssh
customize
msg_info "Cleaning up"
rm -f $temp_file
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -66,7 +66,6 @@ $STD apt-get install --no-install-recommends -y \
mesa-vulkan-drivers \
ocl-icd-libopencl1 \
tini \
libaom-dev \
zlib1g
$STD apt-get install -y \
libgdk-pixbuf-2.0-dev librsvg2-dev libtool
@ -94,7 +93,7 @@ if [[ "$CTTYPE" == "0" ]]; then
fi
msg_ok "Dependencies Installed"
read -r -p "${TAB3}Install OpenVINO dependencies for Intel HW-accelerated machine-learning? y/N " prompt
read -r -p "Install OpenVINO dependencies for Intel HW-accelerated machine-learning? y/N " prompt
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
msg_info "Installing OpenVINO dependencies"
touch ~/.openvino
@ -154,6 +153,7 @@ if [[ -f ~/.openvino ]]; then
fi
msg_ok "Packages from Testing Repo Installed"
# Fix default DB collation issue after libc update
$STD sudo -u postgres psql -c "ALTER DATABASE postgres REFRESH COLLATION VERSION;"
$STD sudo -u postgres psql -c "ALTER DATABASE $DB_NAME REFRESH COLLATION VERSION;"
@ -218,7 +218,7 @@ $STD cmake --preset=release-noplugins \
-DWITH_LIBSHARPYUV=ON \
-DWITH_LIBDE265=ON \
-DWITH_AOM_DECODER=OFF \
-DWITH_AOM_ENCODER=ON \
-DWITH_AOM_ENCODER=OFF \
-DWITH_X265=OFF \
-DWITH_EXAMPLES=OFF \
..
@ -254,8 +254,7 @@ $STD make clean
cd "$STAGING_DIR"
SOURCE=$SOURCE_DIR/libvips
# : "${LIBVIPS_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libvips.json)}"
: "${LIBVIPS_REVISION:=8fa37a64547e392d3808eed8d72adab7e02b3d00}"
: "${LIBVIPS_REVISION:=$(jq -cr '.revision' $BASE_DIR/server/sources/libvips.json)}"
$STD git clone https://github.com/libvips/libvips.git "$SOURCE"
cd "$SOURCE"
$STD git reset --hard "$LIBVIPS_REVISION"
@ -302,10 +301,6 @@ cd "$SRC_DIR"
cp -a server/{node_modules,dist,bin,resources,package.json,package-lock.json,start*.sh} "$APP_DIR"/
cp -a web/build "$APP_DIR"/www
cp LICENSE "$APP_DIR"
cd "$APP_DIR"
export SHARP_FORCE_GLOBAL_LIBVIPS=true
$STD npm install sharp
rm -rf "$APP_DIR"/node_modules/@img/sharp-{libvips*,linuxmusl-x64}
msg_ok "Installed Immich Web Components"
cd "$SRC_DIR"/machine-learning
@ -336,6 +331,8 @@ ln -s "$UPLOAD_DIR" "$APP_DIR"/upload
ln -s "$UPLOAD_DIR" "$ML_DIR"/upload
msg_info "Installing Immich CLI"
$STD npm install --build-from-source sharp
rm -rf "$APP_DIR"/node_modules/@img/sharp-{libvips*,linuxmusl-x64}
$STD npm i -g @immich/cli
msg_ok "Installed Immich CLI"

View File

@ -14,17 +14,22 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y \
ca-certificates \
build-essential
$STD apt-get install -y make
$STD apt-get install -y g++
$STD apt-get install -y gcc
$STD apt-get install -y ca-certificates
msg_ok "Installed Dependencies"
NODE_VERSION="22" NODE_MODULE="yarn@latest" setup_nodejs
fetch_and_deploy_gh_release "mafl" "hywax/mafl"
RELEASE=$(curl -fsSL https://api.github.com/repos/hywax/mafl/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
msg_info "Installing Mafl v${RELEASE}"
curl -fsSL "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz" -o "v${RELEASE}.tar.gz"
tar -xzf v${RELEASE}.tar.gz
mkdir -p /opt/mafl/data
curl -fsSL "https://raw.githubusercontent.com/hywax/mafl/main/.example/config.yml" -o "/opt/mafl/data/config.yml"
mv mafl-${RELEASE}/* /opt/mafl
rm -rf mafl-${RELEASE}
cd /opt/mafl
export NUXT_TELEMETRY_DISABLED=true
$STD yarn install

View File

@ -45,12 +45,12 @@ fetch_and_deploy_gh_release "planka" "plankanban/planka" "prebuild" "latest" "/o
msg_info "Configuring PLANKA"
LOCAL_IP=$(hostname -I | awk '{print $1}')
SECRET_KEY=$(openssl rand -hex 64)
cd /opt/planka
cd /opt/planka/planka
$STD npm install
cp .env.sample .env
sed -i "s#http://localhost:1337#http://$LOCAL_IP:1337#g" /opt/planka/.env
sed -i "s#postgres@localhost#planka:$DB_PASS@localhost#g" /opt/planka/.env
sed -i "s#notsecretkey#$SECRET_KEY#g" /opt/planka/.env
sed -i "s#http://localhost:1337#http://$LOCAL_IP:1337#g" /opt/planka/planka/.env
sed -i "s#postgres@localhost#planka:$DB_PASS@localhost#g" /opt/planka/planka/.env
sed -i "s#notsecretkey#$SECRET_KEY#g" /opt/planka/planka/.env
$STD npm run db:init
msg_ok "Configured PLANKA"
@ -84,7 +84,7 @@ Description=planka Service
After=network.target
[Service]
WorkingDirectory=/opt/planka
WorkingDirectory=/opt/planka/planka
ExecStart=/usr/bin/npm start --prod
Restart=always

View File

@ -15,6 +15,7 @@ update_os
msg_info "Installing Dependencies (Patience)"
$STD apt-get install -y \
git \
automake \
autoconf \
libtool \
@ -24,26 +25,10 @@ $STD apt-get install -y \
make \
g++ \
unpaper \
fonts-urw-base35 \
qpdf \
poppler-utils
msg_ok "Installed Dependencies"
PYTHON_VERSION="3.12" setup_uv
JAVA_VERSION="21" setup_java
read -r -p "${TAB3}Do you want to Stirling-PDF with Login? (no/n = without Login) [Y/n] " response
response=${response,,} # Convert to lowercase
login_mode="false"
if [[ "$response" == "y" || "$response" == "yes" || -z "$response" ]]; then
USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "stirling-pdf" "Stirling-Tools/Stirling-PDF" "singlefile" "latest" "/opt/Stirling-PDF" "Stirling-PDF-with-login.jar"
mv /opt/Stirling-PDF/Stirling-PDF-with-login.jar /opt/Stirling-PDF/Stirling-PDF.jar
touch ~/.Stirling-PDF-login
login_mode="true"
else
USE_ORIGINAL_FILENAME=true fetch_and_deploy_gh_release "stirling-pdf" "Stirling-Tools/Stirling-PDF" "singlefile" "latest" "/opt/Stirling-PDF" "Stirling-PDF.jar"
fi
msg_info "Installing LibreOffice Components"
$STD apt-get install -y \
libreoffice-writer \
@ -52,35 +37,32 @@ $STD apt-get install -y \
libreoffice-core \
libreoffice-common \
libreoffice-base-core \
libreoffice-script-provider-python \
libreoffice-java-common \
unoconv \
pngquant \
weasyprint
python3-uno
msg_ok "Installed LibreOffice Components"
msg_info "Installing Python Dependencies"
mkdir -p /tmp/stirling-pdf
$STD uv venv /opt/.venv
export PATH="/opt/.venv/bin:$PATH"
source /opt/.venv/bin/activate
$STD uv pip install --upgrade pip
$STD uv pip install \
$STD apt-get install -y \
python3 \
python3-pip
rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
$STD pip3 install \
uno \
opencv-python-headless \
ocrmypdf \
pillow \
pdf2image
$STD apt-get install -y python3-uno python3-pip
$STD pip3 install --break-system-packages unoserver
ln -sf /opt/.venv/bin/python3 /usr/local/bin/python3
ln -sf /opt/.venv/bin/pip /usr/local/bin/pip
unoconv \
pngquant \
WeasyPrint
msg_ok "Installed Python Dependencies"
msg_info "Installing Azul Zulu"
curl -fsSL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xB1998361219BD9C9" -o "/etc/apt/trusted.gpg.d/zulu-repo.asc"
curl -fsSL "https://cdn.azul.com/zulu/bin/zulu-repo_1.0.0-3_all.deb" -o "/zulu-repo_1.0.0-3_all.deb"
$STD dpkg -i zulu-repo_1.0.0-3_all.deb
$STD apt-get update
$STD apt-get -y install zulu17-jdk
msg_ok "Installed Azul Zulu"
msg_info "Installing JBIG2"
$STD curl -fsSL -o /tmp/jbig2enc.tar.gz https://github.com/agl/jbig2enc/archive/refs/tags/0.30.tar.gz
mkdir -p /opt/jbig2enc
tar -xzf /tmp/jbig2enc.tar.gz -C /opt/jbig2enc --strip-components=1
$STD git clone https://github.com/agl/jbig2enc /opt/jbig2enc
cd /opt/jbig2enc
$STD bash ./autogen.sh
$STD bash ./configure
@ -92,46 +74,23 @@ msg_info "Installing Language Packs (Patience)"
$STD apt-get install -y 'tesseract-ocr-*'
msg_ok "Installed Language Packs"
msg_info "Creating Environment Variables"
cat <<EOF >/opt/Stirling-PDF/.env
# Java tuning
JAVA_BASE_OPTS="-XX:+UnlockExperimentalVMOptions -XX:MaxRAMPercentage=75 -XX:InitiatingHeapOccupancyPercent=20 -XX:+G1PeriodicGCInvokesConcurrent -XX:G1PeriodicGCInterval=10000 -XX:+UseStringDeduplication -XX:G1PeriodicGCSystemLoadThreshold=70"
JAVA_CUSTOM_OPTS=""
# LibreOffice
PATH=/opt/.venv/bin:/usr/lib/libreoffice/program:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
UNO_PATH=/usr/lib/libreoffice/program
URE_BOOTSTRAP=file:///usr/lib/libreoffice/program/fundamentalrc
PYTHONPATH=/usr/lib/libreoffice/program:/opt/.venv/lib/python3.12/site-packages
LD_LIBRARY_PATH=/usr/lib/libreoffice/program
STIRLING_TEMPFILES_DIRECTORY=/tmp/stirling-pdf
TMPDIR=/tmp/stirling-pdf
TEMP=/tmp/stirling-pdf
TMP=/tmp/stirling-pdf
# Paths
PATH=/opt/.venv/bin:/usr/lib/libreoffice/program:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
EOF
if [[ "$login_mode" == "true" ]]; then
cat <<EOF >>/opt/Stirling-PDF/.env
# activate Login
DISABLE_ADDITIONAL_FEATURES=false
SECURITY_ENABLELOGIN=true
# login credentials
SECURITY_INITIALLOGIN_USERNAME=admin
SECURITY_INITIALLOGIN_PASSWORD=stirling
EOF
fi
msg_ok "Created Environment Variables"
msg_info "Refreshing Font Cache"
$STD fc-cache -fv
msg_ok "Font Cache Updated"
msg_info "Installing Stirling-PDF (Additional Patience)"
RELEASE=$(curl -fsSL https://api.github.com/repos/Stirling-Tools/Stirling-PDF/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
curl -fsSL "https://github.com/Stirling-Tools/Stirling-PDF/archive/refs/tags/v${RELEASE}.tar.gz" -o "v${RELEASE}.tar.gz"
tar -xzf v${RELEASE}.tar.gz
cd Stirling-PDF-$RELEASE
chmod +x ./gradlew
$STD ./gradlew build
mkdir -p /opt/Stirling-PDF
touch /opt/Stirling-PDF/.env
mv ./build/libs/Stirling-PDF-*.jar /opt/Stirling-PDF/
mv scripts /opt/Stirling-PDF/
ln -s /opt/Stirling-PDF/Stirling-PDF-$RELEASE.jar /opt/Stirling-PDF/Stirling-PDF.jar
ln -s /usr/share/tesseract-ocr/5/tessdata/ /usr/share/tessdata
msg_ok "Installed Stirling-PDF"
msg_info "Creating Service"
# Create LibreOffice listener service
cat <<EOF >/etc/systemd/system/libreoffice-listener.service
[Unit]
Description=LibreOffice Headless Listener Service
@ -148,6 +107,14 @@ Restart=always
WantedBy=multi-user.target
EOF
# Set up environment variables
cat <<EOF >/opt/Stirling-PDF/.env
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/lib/libreoffice/program
UNO_PATH=/usr/lib/libreoffice/program
PYTHONPATH=/usr/lib/python3/dist-packages:/usr/lib/libreoffice/program
LD_LIBRARY_PATH=/usr/lib/libreoffice/program
EOF
cat <<EOF >/etc/systemd/system/stirlingpdf.service
[Unit]
Description=Stirling-PDF service
@ -170,32 +137,16 @@ RestartSec=10
WantedBy=multi-user.target
EOF
cat <<EOF >/etc/systemd/system/unoserver.service
[Unit]
Description=UnoServer RPC Interface
After=libreoffice-listener.service
Requires=libreoffice-listener.service
[Service]
Type=simple
ExecStart=/usr/local/bin/unoserver --port 2003 --interface 127.0.0.1
Restart=always
EnvironmentFile=/opt/Stirling-PDF/.env
[Install]
WantedBy=multi-user.target
EOF
# Enable and start services
systemctl enable -q --now libreoffice-listener
systemctl enable -q --now stirlingpdf
systemctl enable -q --now unoserver
msg_ok "Created Service"
motd_ssh
customize
msg_info "Cleaning up"
rm -f /tmp/jbig2enc.tar.gz
rm -rf v${RELEASE}.tar.gz /zulu-repo_1.0.0-3_all.deb
$STD apt-get -y autoremove
$STD apt-get -y autoclean
msg_ok "Cleaned"

View File

@ -14,12 +14,16 @@ network_check
update_os
msg_info "Installing Dependencies"
$STD apt-get install -y \
ffmpeg \
vlc
$STD apt-get install -y ffmpeg
$STD apt-get install -y vlc
msg_ok "Installed Dependencies"
fetch_and_deploy_gh_release "threadfin" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
msg_info "Installing Threadfin"
mkdir -p /opt/threadfin
curl -fsSL "https://github.com/Threadfin/Threadfin/releases/latest/download/Threadfin_linux_amd64" -o "/opt/threadfin/threadfin"
chmod +x /opt/threadfin/threadfin
msg_ok "Installed Threadfin"
msg_info "Creating Service"
cat <<EOF >/etc/systemd/system/threadfin.service

View File

@ -83,6 +83,11 @@ update_os() {
msg_info "Updating Container OS"
$STD apk -U upgrade
msg_ok "Updated Container OS"
msg_info "Installing core dependencies"
$STD apk update
$STD apk add newt curl openssh nano mc ncurses gpg
msg_ok "Core dependencies installed"
}
# This function modifies the message of the day (motd) and SSH settings

View File

@ -304,12 +304,13 @@ echo_default() {
fi
# Output the selected values with icons
echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}${CT_ID}${CL}"
echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os ($var_version)${CL}"
echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os${CL}"
echo -e "${OSVERSION}${BOLD}${DGN}Version: ${BGN}$var_version${CL}"
echo -e "${CONTAINERTYPE}${BOLD}${DGN}Container Type: ${BGN}$CT_TYPE_DESC${CL}"
echo -e "${DISKSIZE}${BOLD}${DGN}Disk Size: ${BGN}${DISK_SIZE} GB${CL}"
echo -e "${CPUCORE}${BOLD}${DGN}CPU Cores: ${BGN}${CORE_COUNT}${CL}"
echo -e "${RAMSIZE}${BOLD}${DGN}RAM Size: ${BGN}${RAM_SIZE} MiB${CL}"
echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}${CT_ID}${CL}"
if [ "$VERB" == "yes" ]; then
echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}Enabled${CL}"
fi
@ -1094,9 +1095,7 @@ build_container() {
# This executes create_lxc.sh and creates the container and .conf file
bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/create_lxc.sh)" $?
LXC_CONFIG="/etc/pve/lxc/${CTID}.conf"
# USB passthrough for privileged LXC (CT_TYPE=0)
LXC_CONFIG=/etc/pve/lxc/${CTID}.conf
if [ "$CT_TYPE" == "0" ]; then
cat <<EOF >>"$LXC_CONFIG"
# USB passthrough
@ -1112,82 +1111,38 @@ lxc.mount.entry: /dev/ttyACM1 dev/ttyACM1 none bind,optional,create=
EOF
fi
# VAAPI passthrough for privileged containers or known apps
VAAPI_APPS=(
"immich"
"Channels"
"Emby"
"ErsatzTV"
"Frigate"
"Jellyfin"
"Plex"
"Scrypted"
"Tdarr"
"Unmanic"
"Ollama"
"FileFlows"
"Open WebUI"
)
is_vaapi_app=false
for vaapi_app in "${VAAPI_APPS[@]}"; do
if [[ "$APP" == "$vaapi_app" ]]; then
is_vaapi_app=true
break
if [ "$CT_TYPE" == "0" ]; then
if [[ "$APP" == "Channels" || "$APP" == "Emby" || "$APP" == "ErsatzTV" || "$APP" == "Frigate" || "$APP" == "Jellyfin" || "$APP" == "Plex" || "$APP" == "immich" || "$APP" == "Tdarr" || "$APP" == "Open WebUI" || "$APP" == "Unmanic" || "$APP" == "Ollama" || "$APP" == "FileFlows" ]]; then
cat <<EOF >>"$LXC_CONFIG"
# VAAPI hardware transcoding
lxc.cgroup2.devices.allow: c 226:0 rwm
lxc.cgroup2.devices.allow: c 226:128 rwm
lxc.cgroup2.devices.allow: c 29:0 rwm
lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file
lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir
lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file
EOF
fi
done
if ([ "$CT_TYPE" == "0" ] || [ "$is_vaapi_app" == "true" ]) &&
([[ -e /dev/dri/renderD128 ]] || [[ -e /dev/dri/card0 ]] || [[ -e /dev/fb0 ]]); then
echo ""
msg_custom "⚙️ " "\e[96m" "Configuring VAAPI passthrough for LXC container"
if [ "$CT_TYPE" != "0" ]; then
msg_custom "⚠️ " "\e[33m" "Container is unprivileged VAAPI passthrough may not work without additional host configuration (e.g., idmap)."
fi
msg_custom " " "\e[96m" "VAAPI enables GPU hardware acceleration (e.g., for video transcoding in Jellyfin or Plex)."
echo ""
read -rp "➤ Automatically mount all available VAAPI devices? [Y/n]: " VAAPI_ALL
if [[ "$VAAPI_ALL" =~ ^[Yy]$|^$ ]]; then
if [ "$CT_TYPE" == "0" ]; then
# Privileged container → everything allowed
[[ -e /dev/dri/renderD128 ]] && {
echo "lxc.cgroup2.devices.allow: c 226:128 rwm" >>"$LXC_CONFIG"
echo "lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file" >>"$LXC_CONFIG"
}
[[ -e /dev/dri/card0 ]] && {
echo "lxc.cgroup2.devices.allow: c 226:0 rwm" >>"$LXC_CONFIG"
echo "lxc.mount.entry: /dev/dri/card0 dev/dri/card0 none bind,optional,create=file" >>"$LXC_CONFIG"
}
[[ -e /dev/fb0 ]] && {
echo "lxc.cgroup2.devices.allow: c 29:0 rwm" >>"$LXC_CONFIG"
echo "lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file" >>"$LXC_CONFIG"
}
[[ -d /dev/dri ]] && {
echo "lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir" >>"$LXC_CONFIG"
}
else
# Unprivileged container → only devX entries for the UI
[[ -e /dev/dri/card0 ]] && echo "dev0: /dev/dri/card0,gid=44" >>"$LXC_CONFIG"
[[ -e /dev/dri/card1 ]] && echo "dev0: /dev/dri/card1,gid=44" >>"$LXC_CONFIG"
[[ -e /dev/dri/renderD128 ]] && echo "dev1: /dev/dri/renderD128,gid=104" >>"$LXC_CONFIG"
else
if [[ "$APP" == "Channels" || "$APP" == "Emby" || "$APP" == "ErsatzTV" || "$APP" == "Frigate" || "$APP" == "Jellyfin" || "$APP" == "Plex" || "$APP" == "immich" || "$APP" == "Tdarr" || "$APP" == "Open WebUI" || "$APP" == "Unmanic" || "$APP" == "Ollama" || "$APP" == "FileFlows" ]]; then
if [[ -e "/dev/dri/renderD128" ]]; then
if [[ -e "/dev/dri/card0" ]]; then
cat <<EOF >>"$LXC_CONFIG"
# VAAPI hardware transcoding
dev0: /dev/dri/card0,gid=44
dev1: /dev/dri/renderD128,gid=104
EOF
else
cat <<EOF >>"$LXC_CONFIG"
# VAAPI hardware transcoding
dev0: /dev/dri/card1,gid=44
dev1: /dev/dri/renderD128,gid=104
EOF
fi
fi
fi
fi
if [ "$CT_TYPE" == "1" ] && [ "$is_vaapi_app" == "true" ]; then
if [[ -e /dev/dri/card0 ]]; then
echo "dev0: /dev/dri/card0,gid=44" >>"$LXC_CONFIG"
elif [[ -e /dev/dri/card1 ]]; then
echo "dev0: /dev/dri/card1,gid=44" >>"$LXC_CONFIG"
fi
if [[ -e /dev/dri/renderD128 ]]; then
echo "dev1: /dev/dri/renderD128,gid=104" >>"$LXC_CONFIG"
fi
fi
# TUN device passthrough
if [ "$ENABLE_TUN" == "yes" ]; then
cat <<EOF >>"$LXC_CONFIG"
lxc.cgroup2.devices.allow: c 10:200 rwm
@ -1217,13 +1172,10 @@ EOF'
locale-gen >/dev/null && \
export LANG=\$locale_line"
if [[ -z "${tz:-}" ]]; then
tz=$(timedatectl show --property=Timezone --value 2>/dev/null || echo "Etc/UTC")
fi
if pct exec "$CTID" -- test -e "/usr/share/zoneinfo/$tz"; then
pct exec "$CTID" -- bash -c "tz='$tz'; echo \"\$tz\" >/etc/timezone && ln -sf \"/usr/share/zoneinfo/\$tz\" /etc/localtime"
pct exec "$CTID" -- bash -c "echo $tz >/etc/timezone && ln -sf /usr/share/zoneinfo/$tz /etc/localtime"
else
msg_warn "Skipping timezone setup zone '$tz' not found in container"
msg_info "Skipping timezone setup zone '$tz' not found in container"
fi
pct exec "$CTID" -- bash -c "apt-get update >/dev/null && apt-get install -y sudo curl mc gnupg2 >/dev/null"
@ -1303,9 +1255,7 @@ api_exit_script() {
fi
}
if command -v pveversion >/dev/null 2>&1; then
trap 'api_exit_script' EXIT
fi
trap 'api_exit_script' EXIT
trap 'post_update_to_api "failed" "$BASH_COMMAND"' ERR
trap 'post_update_to_api "failed" "INTERRUPTED"' SIGINT
trap 'post_update_to_api "failed" "TERMINATED"' SIGTERM

View File

@ -1,6 +1,30 @@
# Copyright (c) 2021-2025 community-scripts ORG
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/LICENSE
# if ! declare -f wait_for >/dev/null; then
# echo "[DEBUG] Undefined function 'wait_for' used from: ${BASH_SOURCE[*]}" >&2
# wait_for() {
# echo "[DEBUG] Fallback: wait_for called with: $*" >&2
# true
# }
# fi
trap 'on_error $? $LINENO' ERR
trap 'on_exit' EXIT
trap 'on_interrupt' INT
trap 'on_terminate' TERM
if ! declare -f wait_for >/dev/null; then
wait_for() {
true
}
fi
declare -A MSG_INFO_SHOWN=()
SPINNER_PID=""
SPINNER_ACTIVE=0
SPINNER_MSG=""
# ------------------------------------------------------------------------------
# Loads core utility groups once (colors, formatting, icons, defaults).
# ------------------------------------------------------------------------------
@ -19,51 +43,100 @@ load_functions() {
# add more
}
# ============================================================================
# Error & Signal Handling: robust, universal, subshell-safe
# ============================================================================
on_error() {
local exit_code="$1"
local lineno="$2"
_tool_error_hint() {
local cmd="$1"
local code="$2"
case "$cmd" in
curl)
case "$code" in
6) echo "Curl: Could not resolve host (DNS problem)" ;;
7) echo "Curl: Failed to connect to host (connection refused)" ;;
22) echo "Curl: HTTP error (404/403 etc)" ;;
28) echo "Curl: Operation timeout" ;;
*) echo "Curl: Unknown error ($code)" ;;
esac
stop_spinner
case "$exit_code" in
1) msg_error "Generic error occurred (line $lineno)" ;;
2) msg_error "Shell misuse (line $lineno)" ;;
126) msg_error "Command cannot execute (line $lineno)" ;;
127) msg_error "Command not found (line $lineno)" ;;
128) msg_error "Invalid exit argument (line $lineno)" ;;
130) msg_error "Script aborted by user (CTRL+C)" ;;
143) msg_error "Script terminated by SIGTERM" ;;
*) msg_error "Script failed at line $lineno with exit code $exit_code" ;;
esac
exit "$exit_code"
}
on_exit() {
cleanup_spinner || true
[[ "${VERBOSE:-no}" == "yes" ]] && msg_info "Script exited"
}
on_interrupt() {
msg_error "Interrupted by user (CTRL+C)"
exit 130
}
on_terminate() {
msg_error "Terminated by signal (TERM)"
exit 143
}
setup_trap_abort_handling() {
trap '__handle_signal_abort SIGINT' SIGINT
trap '__handle_signal_abort SIGTERM' SIGTERM
trap '__handle_unexpected_error $?' ERR
}
__handle_signal_abort() {
local signal="$1"
echo
[ -n "${SPINNER_PID:-}" ] && kill "$SPINNER_PID" 2>/dev/null && wait "$SPINNER_PID" 2>/dev/null
case "$signal" in
SIGINT)
msg_error "Script aborted by user (CTRL+C)"
exit 130
;;
wget)
echo "Wget failed URL unreachable or permission denied"
SIGTERM)
msg_error "Script terminated (SIGTERM)"
exit 143
;;
systemctl)
echo "Systemd unit failure check service name and permissions"
*)
msg_error "Script interrupted (unknown signal: $signal)"
exit 1
;;
jq)
echo "jq parse error malformed JSON or missing key"
;;
mariadb | mysql)
echo "MySQL/MariaDB command failed check credentials or DB"
;;
unzip)
echo "unzip failed corrupt file or missing permission"
;;
tar)
echo "tar failed invalid format or missing binary"
;;
node | npm | pnpm | yarn)
echo "Node tool failed check version compatibility or package.json"
;;
*) echo "" ;;
esac
}
catch_errors() {
set -Eeuo pipefail
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
__handle_unexpected_error() {
local exit_code="$1"
echo
[ -n "${SPINNER_PID:-}" ] && kill "$SPINNER_PID" 2>/dev/null && wait "$SPINNER_PID" 2>/dev/null
case "$exit_code" in
1)
msg_error "Generic error occurred (exit code 1)"
;;
2)
msg_error "Misuse of shell builtins (exit code 2)"
;;
126)
msg_error "Command invoked cannot execute (exit code 126)"
;;
127)
msg_error "Command not found (exit code 127)"
;;
128)
msg_error "Invalid exit argument (exit code 128)"
;;
130)
msg_error "Script aborted by user (CTRL+C)"
;;
143)
msg_error "Script terminated by SIGTERM"
;;
*)
msg_error "Unexpected error occurred (exit code $exit_code)"
;;
esac
exit "$exit_code"
}
# ------------------------------------------------------------------------------
@ -80,13 +153,6 @@ color() {
CL=$(echo "\033[m")
}
# Special for spinner and colorized output via printf
color_spinner() {
CS_YW=$'\033[33m'
CS_YWB=$'\033[93m'
CS_CL=$'\033[m'
}
# ------------------------------------------------------------------------------
# Defines formatting helpers like tab, bold, and line reset sequences.
# ------------------------------------------------------------------------------
@ -130,7 +196,6 @@ icons() {
ADVANCED="${TAB}🧩${TAB}${CL}"
FUSE="${TAB}🗂️${TAB}${CL}"
HOURGLASS="${TAB}${TAB}"
}
# ------------------------------------------------------------------------------
@ -162,7 +227,7 @@ silent() {
# Function to download & save header files
get_header() {
local app_name=$(echo "${APP,,}" | tr -d ' ')
local app_type=${APP_TYPE:-ct}
local app_type=${APP_TYPE:-ct} # Default 'ct'
local header_url="https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/${app_type}/headers/${app_name}"
local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}"
@ -192,39 +257,77 @@ header_info() {
fi
}
ensure_tput() {
if ! command -v tput >/dev/null 2>&1; then
if grep -qi 'alpine' /etc/os-release; then
apk add --no-cache ncurses >/dev/null 2>&1
elif command -v apt-get >/dev/null 2>&1; then
apt-get update -qq >/dev/null
apt-get install -y -qq ncurses-bin >/dev/null 2>&1
fi
fi
}
# ------------------------------------------------------------------------------
# Performs a curl request with retry logic and inline feedback.
# ------------------------------------------------------------------------------
is_alpine() {
local os_id="${var_os:-${PCT_OSTYPE:-}}"
if [[ -z "$os_id" && -f /etc/os-release ]]; then
os_id="$(
. /etc/os-release 2>/dev/null
echo "${ID:-}"
)"
fi
[[ "$os_id" == "alpine" ]]
}
is_verbose_mode() {
local verbose="${VERBOSE:-${var_verbose:-no}}"
local tty_status
if [[ -t 2 ]]; then
tty_status="interactive"
run_curl() {
if [ "$VERBOSE" = "no" ]; then
$STD curl "$@"
else
tty_status="not-a-tty"
curl "$@"
fi
[[ "$verbose" != "no" || ! -t 2 ]]
}
curl_handler() {
set +e
trap 'set -e' RETURN
local args=()
local url=""
local max_retries=3
local delay=2
local attempt=1
local exit_code
local has_output_file=false
local result=""
# Parse arguments
for arg in "$@"; do
if [[ "$arg" != -* && -z "$url" ]]; then
url="$arg"
fi
[[ "$arg" == "-o" || "$arg" == --output ]] && has_output_file=true
args+=("$arg")
done
if [[ -z "$url" ]]; then
msg_error "No valid URL or option entered for curl_handler"
return 1
fi
$STD msg_info "Fetching: $url"
while [[ $attempt -le $max_retries ]]; do
if $has_output_file; then
$STD run_curl "${args[@]}"
exit_code=$?
else
result=$(run_curl "${args[@]}")
exit_code=$?
fi
if [[ $exit_code -eq 0 ]]; then
$STD msg_ok "Fetched: $url"
$has_output_file || printf '%s' "$result"
return 0
fi
if ((attempt >= max_retries)); then
# Read error log if it exists
if [ -s /tmp/curl_error.log ]; then
local curl_stderr
curl_stderr=$(</tmp/curl_error.log)
rm -f /tmp/curl_error.log
fi
__curl_err_handler "$exit_code" "$url" "${curl_stderr:-}"
exit
fi
$STD printf "\r\033[K${INFO}${YW}Retry $attempt/$max_retries in ${delay}s...${CL}" >&2
sleep "$delay"
((attempt++))
done
set -e
}
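# Minimal usage sketch for curl_handler (comment-only; assumes this file is sourced and
# $STD is set by the caller — the URL below is purely illustrative):
#   curl_handler -fsSL "https://example.com/archive.tar.gz" -o /tmp/archive.tar.gz
#   LATEST=$(curl_handler -fsSL "https://example.com/latest.txt")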
# ------------------------------------------------------------------------------
@ -269,92 +372,144 @@ fatal() {
kill -INT $$
}
spinner() {
local chars=(⠋ ⠙ ⠹ ⠸ ⠼ ⠴ ⠦ ⠧ ⠇ ⠏)
local i=0
while true; do
local index=$((i++ % ${#chars[@]}))
printf "\r\033[2K%s %b" "${CS_YWB}${chars[$index]}${CS_CL}" "${CS_YWB}${SPINNER_MSG:-}${CS_CL}"
sleep 0.1
done
}
# Ensure POSIX compatibility across Alpine and Debian/Ubuntu
# === Spinner Start ===
# Trap cleanup on various signals
trap 'cleanup_spinner' EXIT INT TERM HUP
clear_line() {
tput cr 2>/dev/null || echo -en "\r"
tput el 2>/dev/null || echo -en "\033[K"
}
spinner_frames=('⠋' '⠙' '⠹' '⠸' '⠼' '⠴' '⠦' '⠧' '⠇' '⠏')
stop_spinner() {
local pid="${SPINNER_PID:-}"
[[ -z "$pid" && -f /tmp/.spinner.pid ]] && pid=$(</tmp/.spinner.pid)
# === Spinner Start ===
start_spinner() {
local msg="$1"
local spin_i=0
local interval=0.1
if [[ -n "$pid" && "$pid" =~ ^[0-9]+$ ]]; then
if kill "$pid" 2>/dev/null; then
sleep 0.05
kill -9 "$pid" 2>/dev/null || true
wait "$pid" 2>/dev/null || true
fi
rm -f /tmp/.spinner.pid
stop_spinner
SPINNER_MSG="$msg"
SPINNER_ACTIVE=1
{
while [[ "$SPINNER_ACTIVE" -eq 1 ]]; do
if [[ -t 2 ]]; then
printf "\r\e[2K%s %b" "${TAB}${spinner_frames[spin_i]}${TAB}" "${YW}${SPINNER_MSG}${CL}" >&2
else
printf "%s...\n" "$SPINNER_MSG" >&2
break
fi
spin_i=$(((spin_i + 1) % ${#spinner_frames[@]}))
sleep "$interval"
done
} &
local pid=$!
if ps -p "$pid" >/dev/null 2>&1; then
SPINNER_PID="$pid"
else
SPINNER_ACTIVE=0
SPINNER_PID=""
fi
}
unset SPINNER_PID SPINNER_MSG
stty sane 2>/dev/null || true
# === Spinner Stop ===
stop_spinner() {
if [[ "$SPINNER_ACTIVE" -eq 1 && -n "$SPINNER_PID" ]]; then
SPINNER_ACTIVE=0
if kill -0 "$SPINNER_PID" 2>/dev/null; then
kill "$SPINNER_PID" 2>/dev/null || true
for _ in $(seq 1 10); do
sleep 0.05
kill -0 "$SPINNER_PID" 2>/dev/null || break
done
fi
if [[ "$SPINNER_PID" =~ ^[0-9]+$ ]]; then
ps -p "$SPINNER_PID" -o pid= >/dev/null 2>&1 && wait "$SPINNER_PID" 2>/dev/null || true
fi
printf "\r\e[2K" >&2
SPINNER_PID=""
fi
}
cleanup_spinner() {
stop_spinner
}
msg_info() {
local msg="$1"
[[ -z "$msg" ]] && return
if ! declare -p MSG_INFO_SHOWN &>/dev/null || ! declare -A MSG_INFO_SHOWN &>/dev/null; then
declare -gA MSG_INFO_SHOWN=()
fi
[[ -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return
[[ -z "$msg" || -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return
MSG_INFO_SHOWN["$msg"]=1
stop_spinner
SPINNER_MSG="$msg"
if is_verbose_mode || is_alpine; then
local HOURGLASS="${TAB}⏳${TAB}"
if [[ "${VERBOSE:-no}" == "no" && -t 2 ]]; then
start_spinner "$msg"
else
printf "\r\e[2K%s %b" "$HOURGLASS" "${YW}${msg}${CL}" >&2
return
fi
color_spinner
spinner &
SPINNER_PID=$!
echo "$SPINNER_PID" >/tmp/.spinner.pid
disown "$SPINNER_PID" 2>/dev/null || true
}
msg_ok() {
local msg="$1"
[[ -z "$msg" ]] && return
stop_spinner
clear_line
printf "%s %b\n" "$CM" "${GN}${msg}${CL}" >&2
printf "\r\e[2K%s %b\n" "$CM" "${GN}${msg}${CL}" >&2
unset MSG_INFO_SHOWN["$msg"]
}
msg_error() {
stop_spinner
local msg="$1"
echo -e "${BFR:-} ${CROSS:-✖️} ${RD}${msg}${CL}"
[[ -z "$msg" ]] && return
stop_spinner
printf "\r\e[2K%s %b\n" "$CROSS" "${RD}${msg}${CL}" >&2
}
msg_warn() {
stop_spinner
local msg="$1"
echo -e "${BFR:-} ${INFO:-} ${YWB}${msg}${CL}"
[[ -z "$msg" ]] && return
stop_spinner
printf "\r\e[2K%s %b\n" "$INFO" "${YWB}${msg}${CL}" >&2
unset MSG_INFO_SHOWN["$msg"]
}
msg_custom() {
local symbol="${1:-"[*]"}"
local color="${2:-"\e[36m"}"
local color="${2:-"\e[36m"}" # Default: Cyan
local msg="${3:-}"
[[ -z "$msg" ]] && return
stop_spinner
echo -e "${BFR:-} ${symbol} ${color}${msg}${CL:-\e[0m}"
stop_spinner 2>/dev/null || true
printf "\r\e[2K%s %b\n" "$symbol" "${color}${msg}${CL:-\e[0m}" >&2
}
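# Usage sketch for the message helpers (comment-only; assumes core.func is sourced,
# and the example strings are illustrative):
#   msg_info "Installing Example"     # may start the spinner on a non-verbose TTY
#   msg_ok "Installed Example"        # stops the spinner and prints a green check
#   msg_warn "Optional step skipped"
#   msg_custom "🔧" "\e[36m" "Custom status line"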
msg_progress() {
local current="$1"
local total="$2"
local label="$3"
local width=40
local filled percent bar empty
local fill_char="#"
local empty_char="-"
if ! [[ "$current" =~ ^[0-9]+$ ]] || ! [[ "$total" =~ ^[0-9]+$ ]] || [[ "$total" -eq 0 ]]; then
printf "\r\e[2K%s %b\n" "$CROSS" "${RD}Invalid progress input${CL}" >&2
return
fi
percent=$(((current * 100) / total))
filled=$(((current * width) / total))
empty=$((width - filled))
bar=$(printf "%${filled}s" | tr ' ' "$fill_char")
bar+=$(printf "%${empty}s" | tr ' ' "$empty_char")
printf "\r\e[2K%s [%s] %3d%% %s" "${TAB}" "$bar" "$percent" "$label" >&2
if [[ "$current" -eq "$total" ]]; then
printf "\n" >&2
fi
}
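# Usage sketch for msg_progress (comment-only; 'step'/'total' are illustrative values):
#   total=5
#   for step in $(seq 1 "$total"); do
#     msg_progress "$step" "$total" "Copying files"
#     sleep 1
#   done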
run_container_safe() {
@ -405,5 +560,3 @@ check_or_create_swap() {
return 1
fi
}
trap 'stop_spinner' EXIT INT TERM

View File

@ -21,67 +21,36 @@ fi
# This sets error handling options and defines the error_handler function to handle errors
set -Eeuo pipefail
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
trap on_exit EXIT
trap on_interrupt INT
trap on_terminate TERM
function on_exit() {
local exit_code="$?"
[[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
exit "$exit_code"
}
# This function handles errors
function error_handler() {
printf "\e[?25h"
local exit_code="$?"
local line_number="$1"
local command="$2"
printf "\e[?25h"
echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n"
exit "$exit_code"
}
function on_interrupt() {
echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
exit 130
}
function on_terminate() {
echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
exit 143
}
function check_storage_support() {
local CONTENT="$1"
local -a VALID_STORAGES=()
while IFS= read -r line; do
local STORAGE=$(awk '{print $1}' <<<"$line")
[[ "$STORAGE" == "storage" || -z "$STORAGE" ]] && continue
VALID_STORAGES+=("$STORAGE")
done < <(pvesm status -content "$CONTENT" 2>/dev/null | awk 'NR>1')
[[ ${#VALID_STORAGES[@]} -gt 0 ]]
local error_message="${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}"
echo -e "\n$error_message\n"
exit 200
}
# This checks for the presence of valid Container Storage and Template Storage locations
msg_info "Validating Storage"
if ! check_storage_support "rootdir"; then
msg_error "No valid storage found for 'rootdir' (Container)."
VALIDCT=$(pvesm status -content rootdir | awk 'NR>1')
if [ -z "$VALIDCT" ]; then
msg_error "Unable to detect a valid Container Storage location."
exit 1
fi
if ! check_storage_support "vztmpl"; then
msg_error "No valid storage found for 'vztmpl' (Template)."
VALIDTMP=$(pvesm status -content vztmpl | awk 'NR>1')
if [ -z "$VALIDTMP" ]; then
msg_error "Unable to detect a valid Template Storage location."
exit 1
fi
msg_ok "Validated Storage (rootdir / vztmpl)."
# This function is used to select the storage class and determine the corresponding storage content type and label.
function select_storage() {
local CLASS=$1 CONTENT CONTENT_LABEL
local CLASS=$1
local CONTENT
local CONTENT_LABEL
case $CLASS in
container)
CONTENT='rootdir'
@ -91,72 +60,51 @@ function select_storage() {
CONTENT='vztmpl'
CONTENT_LABEL='Container template'
;;
iso)
CONTENT='iso'
CONTENT_LABEL='ISO image'
;;
images)
CONTENT='images'
CONTENT_LABEL='VM Disk image'
;;
backup)
CONTENT='backup'
CONTENT_LABEL='Backup'
;;
snippets)
CONTENT='snippets'
CONTENT_LABEL='Snippets'
;;
*)
msg_error "Invalid storage class '$CLASS'"
return 1
;;
*) false || {
msg_error "Invalid storage class."
exit 201
} ;;
esac
local -a MENU
local -A STORAGE_MAP
local COL_WIDTH=0
# Collect storage options
local -a MENU
local MSG_MAX_LENGTH=0
while read -r TAG TYPE _ TOTAL USED FREE _; do
[[ -n "$TAG" && -n "$TYPE" ]] || continue
local DISPLAY="${TAG} (${TYPE})"
local USED_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$USED")
local FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$FREE")
local INFO="Free: ${FREE_FMT}B Used: ${USED_FMT}B"
STORAGE_MAP["$DISPLAY"]="$TAG"
MENU+=("$DISPLAY" "$INFO" "OFF")
((${#DISPLAY} > COL_WIDTH)) && COL_WIDTH=${#DISPLAY}
while read -r TAG TYPE _ _ _ FREE _; do
local TYPE_PADDED
local FREE_FMT
TYPE_PADDED=$(printf "%-10s" "$TYPE")
FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.2f <<<"$FREE")B
local ITEM="Type: $TYPE_PADDED Free: $FREE_FMT"
((${#ITEM} + 2 > MSG_MAX_LENGTH)) && MSG_MAX_LENGTH=$((${#ITEM} + 2))
MENU+=("$TAG" "$ITEM" "OFF")
done < <(pvesm status -content "$CONTENT" | awk 'NR>1')
if [ ${#MENU[@]} -eq 0 ]; then
msg_error "No storage found for content type '$CONTENT'."
return 2
fi
local OPTION_COUNT=$((${#MENU[@]} / 3))
if [ $((${#MENU[@]} / 3)) -eq 1 ]; then
STORAGE_RESULT="${STORAGE_MAP[${MENU[0]}]}"
# Auto-select if only one option available
if [[ "$OPTION_COUNT" -eq 1 ]]; then
echo "${MENU[0]}"
return 0
fi
local WIDTH=$((COL_WIDTH + 42))
while true; do
local DISPLAY_SELECTED=$(whiptail --backtitle "Proxmox VE Helper Scripts" \
--title "Storage Pools" \
--radiolist "Which storage pool for ${CONTENT_LABEL,,}?\n(Spacebar to select)" \
16 "$WIDTH" 6 "${MENU[@]}" 3>&1 1>&2 2>&3)
[[ $? -ne 0 ]] && return 3
if [[ -z "$DISPLAY_SELECTED" || -z "${STORAGE_MAP[$DISPLAY_SELECTED]+_}" ]]; then
whiptail --msgbox "No valid storage selected. Please try again." 8 58
continue
fi
STORAGE_RESULT="${STORAGE_MAP[$DISPLAY_SELECTED]}"
return 0
# Display selection menu
local STORAGE
while [[ -z "${STORAGE:+x}" ]]; do
STORAGE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Storage Pools" --radiolist \
"Select the storage pool to use for the ${CONTENT_LABEL,,}.\nUse the spacebar to make a selection.\n" \
16 $((MSG_MAX_LENGTH + 23)) 6 \
"${MENU[@]}" 3>&1 1>&2 2>&3) || {
msg_error "Storage selection cancelled."
exit 202
}
done
}
echo "$STORAGE"
}
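# Usage sketch for select_storage (comment-only; depending on the helper variant, the
# chosen pool is either echoed or exposed via $STORAGE_RESULT):
#   TEMPLATE_STORAGE=$(select_storage template)
#   select_storage container && CONTAINER_STORAGE="$STORAGE_RESULT"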
# Test if required variables are set
[[ "${CTID:-}" ]] || {
msg_error "You need to set 'CTID' variable."
@ -181,55 +129,13 @@ if qm status "$CTID" &>/dev/null || pct status "$CTID" &>/dev/null; then
exit 206
fi
# DEFAULT_FILE="/usr/local/community-scripts/default_storage"
# if [[ -f "$DEFAULT_FILE" ]]; then
# source "$DEFAULT_FILE"
# if [[ -n "$TEMPLATE_STORAGE" && -n "$CONTAINER_STORAGE" ]]; then
# msg_info "Using default storage configuration from: $DEFAULT_FILE"
# msg_ok "Template Storage: ${BL}$TEMPLATE_STORAGE${CL} ${GN}|${CL} Container Storage: ${BL}$CONTAINER_STORAGE${CL}"
# else
# msg_warn "Default storage file exists but is incomplete falling back to manual selection"
# TEMPLATE_STORAGE=$(select_storage template)
# msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
# CONTAINER_STORAGE=$(select_storage container)
# msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
# fi
# else
# # TEMPLATE STORAGE SELECTION
# # Template Storage
# while true; do
# TEMPLATE_STORAGE=$(select_storage template)
# if [[ -n "$TEMPLATE_STORAGE" ]]; then
# msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
# break
# fi
# msg_warn "No valid template storage selected. Please try again."
# done
# Get template storage
TEMPLATE_STORAGE=$(select_storage template)
msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
# while true; do
# CONTAINER_STORAGE=$(select_storage container)
# if [[ -n "$CONTAINER_STORAGE" ]]; then
# msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
# break
# fi
# msg_warn "No valid container storage selected. Please try again."
# done
# fi
while true; do
if select_storage template; then
TEMPLATE_STORAGE="$STORAGE_RESULT"
break
fi
done
while true; do
if select_storage container; then
CONTAINER_STORAGE="$STORAGE_RESULT"
break
fi
done
# Get container storage
CONTAINER_STORAGE=$(select_storage container)
msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
# Check free space on selected container storage
STORAGE_FREE=$(pvesm status | awk -v s="$CONTAINER_STORAGE" '$1 == s { print $6 }')
@ -253,7 +159,7 @@ fi
TEMPLATE_SEARCH="${PCT_OSTYPE}-${PCT_OSVERSION:-}"
msg_info "Updating LXC Template List"
if ! pveam update >/dev/null 2>&1; then
if ! timeout 15 pveam update >/dev/null 2>&1; then
TEMPLATE_FALLBACK=$(pveam list "$TEMPLATE_STORAGE" | awk "/$TEMPLATE_SEARCH/ {print \$2}" | sort -t - -k 2 -V | tail -n1)
if [[ -z "$TEMPLATE_FALLBACK" ]]; then
msg_error "Failed to update LXC template list and no local template matching '$TEMPLATE_SEARCH' found."
@ -284,7 +190,7 @@ if ! pveam list "$TEMPLATE_STORAGE" | grep -q "$TEMPLATE" || ! zstdcat "$TEMPLAT
for attempt in {1..3}; do
msg_info "Attempt $attempt: Downloading LXC template..."
if pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null 2>&1; then
if timeout 120 pveam download "$TEMPLATE_STORAGE" "$TEMPLATE" >/dev/null 2>&1; then
msg_ok "Template download successful."
break
fi
@ -298,7 +204,7 @@ if ! pveam list "$TEMPLATE_STORAGE" | grep -q "$TEMPLATE" || ! zstdcat "$TEMPLAT
done
fi
msg_info "Creating LXC Container"
msg_ok "LXC Template '$TEMPLATE' is ready to use."
# Check and fix subuid/subgid
grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid
grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid
@ -309,15 +215,12 @@ PCT_OPTIONS=(${PCT_OPTIONS[@]:-${DEFAULT_PCT_OPTIONS[@]}})
# Secure creation of the LXC container with lock and template check
lockfile="/tmp/template.${TEMPLATE}.lock"
exec 9>"$lockfile" >/dev/null 2>&1 || {
msg_error "Failed to create lock file '$lockfile'."
exit 200
}
exec 9>"$lockfile"
flock -w 60 9 || {
msg_error "Timeout while waiting for template lock"
exit 211
}
msg_info "Creating LXC Container"
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
msg_error "Container creation failed. Checking if template is corrupted or incomplete."
@ -349,23 +252,16 @@ if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[
sleep 1 # I/O-Sync-Delay
msg_ok "Re-downloaded LXC Template"
fi
if ! pct list | awk '{print $1}' | grep -qx "$CTID"; then
msg_error "Container ID $CTID not listed in 'pct list' unexpected failure."
exit 215
fi
if ! grep -q '^rootfs:' "/etc/pve/lxc/$CTID.conf"; then
msg_error "RootFS entry missing in container config storage not correctly assigned."
exit 216
fi
if grep -q '^hostname:' "/etc/pve/lxc/$CTID.conf"; then
CT_HOSTNAME=$(grep '^hostname:' "/etc/pve/lxc/$CTID.conf" | awk '{print $2}')
if [[ ! "$CT_HOSTNAME" =~ ^[a-z0-9-]+$ ]]; then
msg_warn "Hostname '$CT_HOSTNAME' contains invalid characters may cause issues with networking or DNS."
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
msg_error "Container creation failed after re-downloading template."
exit 200
fi
fi
if ! pct status "$CTID" &>/dev/null; then
msg_error "Container not found after pct create assuming failure."
exit 210
fi
msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created."

View File

@ -62,11 +62,9 @@ setting_up_container() {
rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
systemctl disable -q --now systemd-networkd-wait-online.service
msg_ok "Set up Container OS"
#msg_custom "${CM}" "${GN}" "Network Connected: ${BL}$(hostname -I)"
msg_ok "Network Connected: ${BL}$(hostname -I)"
msg_custom "${CM}" "${GN}" "Network Connected: ${BL}$(hostname -I)"
}
# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
network_check() {
set +e
@ -74,7 +72,6 @@ network_check() {
ipv4_connected=false
ipv6_connected=false
sleep 1
# Check IPv4 connectivity to Google, Cloudflare & Quad9 DNS servers.
if ping -c 1 -W 1 1.1.1.1 &>/dev/null || ping -c 1 -W 1 8.8.8.8 &>/dev/null || ping -c 1 -W 1 9.9.9.9 &>/dev/null; then
msg_ok "IPv4 Internet Connected"
@ -103,26 +100,25 @@ network_check() {
fi
# DNS resolution checks for GitHub-related domains (IPv4 and/or IPv6)
GIT_HOSTS=("github.com" "raw.githubusercontent.com" "api.github.com" "git.community-scripts.org")
GIT_STATUS="Git DNS:"
GITHUB_HOSTS=("github.com" "raw.githubusercontent.com" "api.github.com")
GITHUB_STATUS="GitHub DNS:"
DNS_FAILED=false
for HOST in "${GIT_HOSTS[@]}"; do
for HOST in "${GITHUB_HOSTS[@]}"; do
RESOLVEDIP=$(getent hosts "$HOST" | awk '{ print $1 }' | grep -E '(^([0-9]{1,3}\.){3}[0-9]{1,3}$)|(^[a-fA-F0-9:]+$)' | head -n1)
if [[ -z "$RESOLVEDIP" ]]; then
GIT_STATUS+="$HOST:($DNSFAIL)"
GITHUB_STATUS+="$HOST:($DNSFAIL)"
DNS_FAILED=true
else
GIT_STATUS+=" $HOST:($DNSOK)"
GITHUB_STATUS+=" $HOST:($DNSOK)"
fi
done
if [[ "$DNS_FAILED" == true ]]; then
fatal "$GIT_STATUS"
fatal "$GITHUB_STATUS"
else
msg_ok "$GIT_STATUS"
msg_ok "$GITHUB_STATUS"
fi
set -e
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
}

View File

@ -54,14 +54,9 @@ function setup_nodejs() {
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" \
>/etc/apt/sources.list.d/nodesource.list
sleep 2
if ! apt-get update >/dev/null 2>&1; then
msg_warn "APT update failed retrying in 5s"
sleep 5
if ! apt-get update >/dev/null 2>&1; then
msg_error "Failed to update APT repositories after adding NodeSource"
exit 1
fi
msg_error "Failed to update APT repositories after adding NodeSource"
exit 1
fi
if ! apt-get install -y nodejs >/dev/null 2>&1; then
@ -244,14 +239,10 @@ setup_mariadb() {
DISTRO_CODENAME="$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)"
CURRENT_OS="$(awk -F= '/^ID=/{print $2}' /etc/os-release)"
if ! curl -fsI http://mirror.mariadb.org/repo/ >/dev/null; then
msg_error "MariaDB mirror not reachable"
return 1
fi
msg_info "Setting up MariaDB $MARIADB_VERSION"
# grab latest GA version dynamically
if [[ "$MARIADB_VERSION" == "latest" ]]; then
$STD msg_info "Resolving latest GA MariaDB version"
MARIADB_VERSION=$(curl -fsSL http://mirror.mariadb.org/repo/ |
grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
grep -vE 'rc/|rolling/' |
@ -262,6 +253,7 @@ setup_mariadb() {
msg_error "Could not determine latest GA MariaDB version"
return 1
fi
$STD msg_ok "Latest GA MariaDB version is $MARIADB_VERSION"
fi
local CURRENT_VERSION=""
@ -286,6 +278,7 @@ setup_mariadb() {
$STD msg_info "Setup MariaDB $MARIADB_VERSION"
fi
$STD msg_info "Setting up MariaDB Repository"
curl -fsSL "https://mariadb.org/mariadb_release_signing_key.asc" |
gpg --dearmor -o /etc/apt/trusted.gpg.d/mariadb.gpg
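The version resolution above scrapes the mirror index for x.y.z/ directories; the diff truncates the pipeline, so the final steps below (strip slash, version-sort, pick the newest) are assumed. Run standalone, the same idea is:

# Sketch: pick the newest GA directory from the MariaDB mirror listing
MARIADB_VERSION=$(curl -fsSL http://mirror.mariadb.org/repo/ |
  grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |  # keep x.y.z/ directory names
  grep -vE 'rc/|rolling/' |             # drop release candidates and rolling builds
  sed 's|/||' | sort -V | tail -n1)     # assumed tail of the pipeline: strip slash, newest wins
echo "Latest GA MariaDB: ${MARIADB_VERSION:-unknown}"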
@ -373,6 +366,25 @@ function setup_mysql() {
# PHP_MAX_EXECUTION_TIME - (default: 300)
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# Installs PHP with selected modules and configures Apache/FPM support.
#
# Description:
# - Adds Sury PHP repo if needed
# - Installs default and user-defined modules
# - Patches php.ini for CLI, Apache, and FPM as needed
#
# Variables:
# PHP_VERSION - PHP version to install (default: 8.4)
# PHP_MODULE - Additional comma-separated modules
# PHP_APACHE - Set YES to enable PHP with Apache
# PHP_FPM - Set YES to enable PHP-FPM
# PHP_MEMORY_LIMIT - (default: 512M)
# PHP_UPLOAD_MAX_FILESIZE - (default: 128M)
# PHP_POST_MAX_SIZE - (default: 128M)
# PHP_MAX_EXECUTION_TIME - (default: 300)
# ------------------------------------------------------------------------------
function setup_php() {
local PHP_VERSION="${PHP_VERSION:-8.4}"
local PHP_MODULE="${PHP_MODULE:-}"
@ -421,13 +433,6 @@ function setup_php() {
fi
local MODULE_LIST="php${PHP_VERSION}"
for pkg in $MODULE_LIST; do
if ! apt-cache show "$pkg" >/dev/null 2>&1; then
msg_error "Package not found: $pkg"
exit 1
fi
done
IFS=',' read -ra MODULES <<<"$COMBINED_MODULES"
for mod in "${MODULES[@]}"; do
MODULE_LIST+=" php${PHP_VERSION}-${mod}"
@ -436,17 +441,11 @@ function setup_php() {
if [[ "$PHP_FPM" == "YES" ]]; then
MODULE_LIST+=" php${PHP_VERSION}-fpm"
fi
if [[ "$PHP_APACHE" == "YES" ]]; then
$STD apt-get install -y apache2 libapache2-mod-php${PHP_VERSION}
$STD systemctl restart apache2 || true
fi
if [[ "$PHP_APACHE" == "YES" ]] && [[ -n "$CURRENT_PHP" ]]; then
if [[ -f /etc/apache2/mods-enabled/php${CURRENT_PHP}.load ]]; then
$STD a2dismod php${CURRENT_PHP} || true
fi
$STD a2enmod php${PHP_VERSION}
$STD systemctl restart apache2 || true
fi
if [[ "$PHP_FPM" == "YES" ]] && [[ -n "$CURRENT_PHP" ]]; then
@ -457,6 +456,10 @@ function setup_php() {
$STD apt-get install -y $MODULE_LIST
msg_ok "Setup PHP $PHP_VERSION"
if [[ "$PHP_APACHE" == "YES" ]]; then
$STD systemctl restart apache2 || true
fi
if [[ "$PHP_FPM" == "YES" ]]; then
$STD systemctl enable php${PHP_VERSION}-fpm
$STD systemctl restart php${PHP_VERSION}-fpm
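Going by the variables documented in the header comment above, a caller steers setup_php entirely through the environment; an illustrative invocation (the version and module names are examples, not defaults taken from this diff):

# Sketch: install PHP 8.3 with FPM plus two extra modules
export PHP_VERSION="8.3" PHP_MODULE="intl,redis" PHP_FPM="YES" PHP_MEMORY_LIMIT="512M"
setup_php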
@ -646,15 +649,6 @@ function setup_mongodb() {
DISTRO_ID=$(awk -F= '/^ID=/{ gsub(/"/,"",$2); print $2 }' /etc/os-release)
DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{ print $2 }' /etc/os-release)
# Check AVX support
if ! grep -qm1 'avx[^ ]*' /proc/cpuinfo; then
local major="${MONGO_VERSION%%.*}"
if ((major > 5)); then
msg_error "MongoDB ${MONGO_VERSION} requires AVX support, which is not available on this system."
return 1
fi
fi
case "$DISTRO_ID" in
ubuntu)
MONGO_BASE_URL="https://repo.mongodb.org/apt/ubuntu"
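The AVX guard shown above is a plain cpuinfo flag test; reproduced standalone (a sketch, with MONGO_VERSION defaulted only for illustration):

# Sketch: refuse MongoDB newer than 5.x on CPUs that do not report AVX
MONGO_VERSION="${MONGO_VERSION:-8.0}"  # illustrative default
if ! grep -qm1 'avx' /proc/cpuinfo; then
  major="${MONGO_VERSION%%.*}"
  if ((major > 5)); then
    echo "MongoDB ${MONGO_VERSION} requires AVX, which this CPU does not report" >&2
    exit 1
  fi
fi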
@ -763,7 +757,6 @@ function fetch_and_deploy_gh_release() {
local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile
local version="${4:-latest}"
local target="${5:-/opt/$app}"
local asset_pattern="${6:-}"
local app_lc=$(echo "${app,,}" | tr -d ' ')
local version_file="$HOME/.${app_lc}"
@ -858,9 +851,9 @@ function fetch_and_deploy_gh_release() {
assets=$(echo "$json" | jq -r '.assets[].browser_download_url')
# If explicit filename pattern is provided (param $6), match that first
if [[ -n "$asset_pattern" ]]; then
if [[ -n "$6" ]]; then
for u in $assets; do
[[ "$u" =~ $asset_pattern || "$u" == *"$asset_pattern" ]] && url_match="$u" && break
[[ "$u" =~ $6 || "$u" == *"$6" ]] && url_match="$u" && break
done
fi
@ -903,7 +896,7 @@ function fetch_and_deploy_gh_release() {
}
}
### Prebuild Mode ###
### Prebuild Mode ###
elif [[ "$mode" == "prebuild" ]]; then
local pattern="${6%\"}"
pattern="${pattern#\"}"
@ -922,6 +915,7 @@ function fetch_and_deploy_gh_release() {
break
;;
esac
done
[[ -z "$asset_url" ]] && {
@ -937,42 +931,20 @@ function fetch_and_deploy_gh_release() {
return 1
}
local unpack_tmp
unpack_tmp=$(mktemp -d)
mkdir -p "$target"
if [[ "$filename" == *.zip ]]; then
if ! command -v unzip &>/dev/null; then
$STD apt-get install -y unzip
fi
unzip -q "$tmpdir/$filename" -d "$unpack_tmp"
$STD unzip "$tmpdir/$filename" -d "$target"
elif [[ "$filename" == *.tar.* ]]; then
tar -xf "$tmpdir/$filename" -C "$unpack_tmp"
tar --strip-components=1 -xf "$tmpdir/$filename" -C "$target"
else
msg_error "Unsupported archive format: $filename"
rm -rf "$tmpdir" "$unpack_tmp"
rm -rf "$tmpdir"
return 1
fi
local top_dirs
top_dirs=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1 -type d | wc -l)
if [[ "$top_dirs" -eq 1 ]]; then
# Strip leading folder
local inner_dir
inner_dir=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1 -type d)
shopt -s dotglob nullglob
cp -r "$inner_dir"/* "$target/"
shopt -u dotglob nullglob
else
# Copy all contents
shopt -s dotglob nullglob
cp -r "$unpack_tmp"/* "$target/"
shopt -u dotglob nullglob
fi
rm -rf "$unpack_tmp"
### Singlefile Mode ###
elif [[ "$mode" == "singlefile" ]]; then
local pattern="${6%\"}"
@ -1002,20 +974,13 @@ function fetch_and_deploy_gh_release() {
filename="${asset_url##*/}"
mkdir -p "$target"
local use_filename="${USE_ORIGINAL_FILENAME:-false}"
local target_file="$app"
[[ "$use_filename" == "true" ]] && target_file="$filename"
curl $download_timeout -fsSL -o "$target/$target_file" "$asset_url" || {
curl $download_timeout -fsSL -o "$target/$app" "$asset_url" || {
msg_error "Download failed: $asset_url"
rm -rf "$tmpdir"
return 1
}
if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then
chmod +x "$target/$target_file"
fi
chmod +x "$target/$app"
else
msg_error "Unknown mode: $mode"
@ -1694,154 +1659,3 @@ function setup_imagemagick() {
ensure_usr_local_bin_persist
msg_ok "Setup ImageMagick $VERSION"
}
# ------------------------------------------------------------------------------
# Installs FFmpeg from source or prebuilt binary (Debian/Ubuntu only).
#
# Description:
# - Downloads and builds FFmpeg from GitHub (https://github.com/FFmpeg/FFmpeg)
# - Supports specific version override via FFMPEG_VERSION (e.g. n7.1.1)
# - Supports build profile via FFMPEG_TYPE:
# - minimal : x264, vpx, mp3 only
# - medium : adds subtitles, fonts, opus, vorbis
# - full : adds dav1d, svt-av1, zlib, numa
# - binary : downloads static build (johnvansickle.com)
# - Defaults to latest stable version and full feature set
#
# Notes:
# - Requires: curl, jq, build-essential, and matching codec libraries
# - Result is installed to /usr/local/bin/ffmpeg
# ------------------------------------------------------------------------------
function setup_ffmpeg() {
local TMP_DIR
TMP_DIR=$(mktemp -d)
local GITHUB_REPO="FFmpeg/FFmpeg"
local VERSION="${FFMPEG_VERSION:-latest}"
local TYPE="${FFMPEG_TYPE:-full}"
local BIN_PATH="/usr/local/bin/ffmpeg"
# Binary fallback mode
if [[ "$TYPE" == "binary" ]]; then
msg_info "Installing FFmpeg (static binary)"
curl -fsSL https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz -o "$TMP_DIR/ffmpeg.tar.xz"
tar -xf "$TMP_DIR/ffmpeg.tar.xz" -C "$TMP_DIR"
local EXTRACTED_DIR
EXTRACTED_DIR=$(find "$TMP_DIR" -maxdepth 1 -type d -name "ffmpeg-*")
cp "$EXTRACTED_DIR/ffmpeg" "$BIN_PATH"
cp "$EXTRACTED_DIR/ffprobe" /usr/local/bin/ffprobe
chmod +x "$BIN_PATH" /usr/local/bin/ffprobe
rm -rf "$TMP_DIR"
msg_ok "Installed FFmpeg binary ($($BIN_PATH -version | head -n1))"
return
fi
if ! command -v jq &>/dev/null; then
$STD apt-get update
$STD apt-get install -y jq
fi
# Auto-detect latest stable version if none specified
if [[ "$VERSION" == "latest" || -z "$VERSION" ]]; then
msg_info "Resolving latest FFmpeg tag"
VERSION=$(curl -fsSL "https://api.github.com/repos/${GITHUB_REPO}/tags" |
jq -r '.[].name' |
grep -E '^n[0-9]+\.[0-9]+\.[0-9]+$' |
sort -V | tail -n1)
fi
if [[ -z "$VERSION" ]]; then
msg_error "Could not determine FFmpeg version"
rm -rf "$TMP_DIR"
return 1
fi
msg_info "Installing FFmpeg ${VERSION} ($TYPE)"
# Dependency selection
local DEPS=(build-essential yasm nasm pkg-config)
case "$TYPE" in
minimal)
DEPS+=(libx264-dev libvpx-dev libmp3lame-dev)
;;
medium)
DEPS+=(libx264-dev libvpx-dev libmp3lame-dev libfreetype6-dev libass-dev libopus-dev libvorbis-dev)
;;
full)
DEPS+=(
libx264-dev libx265-dev libvpx-dev libmp3lame-dev
libfreetype6-dev libass-dev libopus-dev libvorbis-dev
libdav1d-dev libsvtav1-dev zlib1g-dev libnuma-dev
)
;;
*)
msg_error "Invalid FFMPEG_TYPE: $TYPE"
rm -rf "$TMP_DIR"
return 1
;;
esac
$STD apt-get update
$STD apt-get install -y "${DEPS[@]}"
curl -fsSL "https://github.com/${GITHUB_REPO}/archive/refs/tags/${VERSION}.tar.gz" -o "$TMP_DIR/ffmpeg.tar.gz"
tar -xzf "$TMP_DIR/ffmpeg.tar.gz" -C "$TMP_DIR"
cd "$TMP_DIR/FFmpeg-"* || {
msg_error "Source extraction failed"
rm -rf "$TMP_DIR"
return 1
}
local args=(
--enable-gpl
--enable-shared
--enable-nonfree
--disable-static
--enable-libx264
--enable-libvpx
--enable-libmp3lame
)
if [[ "$TYPE" != "minimal" ]]; then
args+=(--enable-libfreetype --enable-libass --enable-libopus --enable-libvorbis)
fi
if [[ "$TYPE" == "full" ]]; then
args+=(--enable-libx265 --enable-libdav1d --enable-zlib)
fi
if [[ ${#args[@]} -eq 0 ]]; then
msg_error "FFmpeg configure args array is empty aborting."
rm -rf "$TMP_DIR"
return 1
fi
./configure "${args[@]}" >"$TMP_DIR/configure.log" 2>&1 || {
msg_error "FFmpeg ./configure failed (see $TMP_DIR/configure.log)"
cat "$TMP_DIR/configure.log" | tail -n 20
rm -rf "$TMP_DIR"
return 1
}
$STD make -j"$(nproc)"
$STD make install
echo "/usr/local/lib" >/etc/ld.so.conf.d/ffmpeg.conf
ldconfig
ldconfig -p | grep libavdevice >/dev/null || {
msg_error "libavdevice not registered with dynamic linker"
return 1
}
if ! command -v ffmpeg &>/dev/null; then
msg_error "FFmpeg installation failed"
rm -rf "$TMP_DIR"
return 1
fi
local FINAL_VERSION
FINAL_VERSION=$(ffmpeg -version | head -n1 | awk '{print $3}')
rm -rf "$TMP_DIR"
ensure_usr_local_bin_persist
msg_ok "Setup FFmpeg $FINAL_VERSION"
}
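Per the header comment, the build is steered entirely by FFMPEG_VERSION and FFMPEG_TYPE; an illustrative invocation (the tag is an example, not a recommendation):

# Sketch: build a specific tag with the medium profile
export FFMPEG_VERSION="n7.1.1" FFMPEG_TYPE="medium"
setup_ffmpeg
# Sketch: skip compiling and take the static binary instead
FFMPEG_TYPE="binary" setup_ffmpeg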


@ -162,21 +162,6 @@ update_installation() {
generate_service >/lib/systemd/system/iptag.service
msg_ok "Updated service file"
msg_info "Creating manual run command"
cat <<'EOF' >/usr/local/bin/iptag-run
#!/usr/bin/env bash
CONFIG_FILE="/opt/iptag/iptag.conf"
SCRIPT_FILE="/opt/iptag/iptag"
if [[ ! -f "$SCRIPT_FILE" ]]; then
echo "❌ Main script not found: $SCRIPT_FILE"
exit 1
fi
export FORCE_SINGLE_RUN=true
exec "$SCRIPT_FILE"
EOF
chmod +x /usr/local/bin/iptag-run
msg_ok "Created iptag-run executable - You can execute this manually by entering “iptag-run” in the Proxmox host, so the script is executed by hand."
msg_info "Restarting service"
systemctl daemon-reload &>/dev/null
systemctl enable -q --now iptag.service &>/dev/null
@ -209,19 +194,18 @@ LXC_STATUS_CHECK_INTERVAL=300
FORCE_UPDATE_INTERVAL=7200
# Performance optimizations
VM_IP_CACHE_TTL=300
MAX_PARALLEL_VM_CHECKS=1
VM_IP_CACHE_TTL=120
MAX_PARALLEL_VM_CHECKS=5
# LXC performance optimizations
LXC_IP_CACHE_TTL=300
MAX_PARALLEL_LXC_CHECKS=2
LXC_IP_CACHE_TTL=0
MAX_PARALLEL_LXC_CHECKS=7
# Extreme LXC optimizations
LXC_BATCH_SIZE=3
LXC_STATUS_CACHE_TTL=300
LXC_BATCH_SIZE=20
LXC_STATUS_CACHE_TTL=30
LXC_AGGRESSIVE_CACHING=true
LXC_SKIP_SLOW_METHODS=true
LXC_ALLOW_FORCED_COMMANDS=false
# Debug settings (set to true to enable debugging)
DEBUG=false
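These are plain key=value shell assignments; the iptag-run wrapper shown elsewhere in this diff points at /opt/iptag/iptag.conf, so a consumer presumably just sources the file (a sketch under that assumption):

# Sketch: load the configuration if present, otherwise rely on built-in defaults
CONFIG_FILE="/opt/iptag/iptag.conf"
[[ -f "$CONFIG_FILE" ]] && source "$CONFIG_FILE"
echo "LXC batch size: ${LXC_BATCH_SIZE:-20}"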
@ -238,8 +222,8 @@ After=network.target
[Service]
Type=simple
ExecStart=/opt/iptag/iptag
Restart=on-failure
RestartSec=10
Restart=always
RestartSec=1
[Install]
WantedBy=multi-user.target
@ -592,9 +576,7 @@ update_tags() {
if [[ "$type" == "lxc" ]]; then
current_ips_full=$(get_lxc_ips "${vmid}")
while IFS= read -r line; do
[[ "$line" == tags:* ]] && current_tags_raw="${line#tags: }" && break
done < <(pct config "$vmid" 2>/dev/null)
local current_tags_raw=$(pct config "${vmid}" 2>/dev/null | grep tags | awk '{print $2}')
else
current_ips_full=$(get_vm_ips "${vmid}")
local vm_config="/etc/pve/qemu-server/${vmid}.conf"
@ -807,10 +789,7 @@ check_status_changed() {
check() {
local current_time changes_detected=false
current_time=$(date +%s)
local update_lxc=false
local update_vm=false
# Periodic cache cleanup (every 10 minutes)
local time_since_last_cleanup=$((current_time - ${last_cleanup_time:-0}))
if [[ $time_since_last_cleanup -ge 600 ]]; then
@ -822,56 +801,60 @@ check() {
# Check LXC status
local time_since_last_lxc_check=$((current_time - last_lxc_status_check_time))
if [[ "${LXC_STATUS_CHECK_INTERVAL:-60}" -gt 0 ]] && \
[[ "$time_since_last_lxc_check" -ge "${LXC_STATUS_CHECK_INTERVAL:-60}" ]]; then
last_lxc_status_check_time=$current_time
[[ "${time_since_last_lxc_check}" -ge "${LXC_STATUS_CHECK_INTERVAL:-60}" ]]; then
last_lxc_status_check_time=${current_time}
if check_status_changed "lxc"; then
update_lxc=true
log_warning "LXC status changes detected"
changes_detected=true
log_warning "LXC status changes detected, updating tags"
update_all_tags "lxc"
last_update_lxc_time=${current_time}
fi
fi
# Check VM status
local time_since_last_vm_check=$((current_time - last_vm_status_check_time))
if [[ "${VM_STATUS_CHECK_INTERVAL:-60}" -gt 0 ]] && \
[[ "$time_since_last_vm_check" -ge "${VM_STATUS_CHECK_INTERVAL:-60}" ]]; then
last_vm_status_check_time=$current_time
[[ "${time_since_last_vm_check}" -ge "${VM_STATUS_CHECK_INTERVAL:-60}" ]]; then
last_vm_status_check_time=${current_time}
if check_status_changed "vm"; then
update_vm=true
log_warning "VM status changes detected"
changes_detected=true
log_warning "VM status changes detected, updating tags"
update_all_tags "vm"
last_update_vm_time=${current_time}
fi
fi
# Check network interface changes
local time_since_last_fw_check=$((current_time - last_fw_net_interface_check_time))
if [[ "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" -gt 0 ]] && \
[[ "$time_since_last_fw_check" -ge "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" ]]; then
last_fw_net_interface_check_time=$current_time
[[ "${time_since_last_fw_check}" -ge "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" ]]; then
last_fw_net_interface_check_time=${current_time}
if check_status_changed "fw"; then
update_lxc=true
update_vm=true
log_warning "Network interface changes detected"
changes_detected=true
log_warning "Network interface changes detected, updating all tags"
update_all_tags "lxc"
update_all_tags "vm"
last_update_lxc_time=${current_time}
last_update_vm_time=${current_time}
fi
fi
# Force update if interval exceeded
# Force update if needed
for type in "lxc" "vm"; do
local last_update_var="last_update_${type}_time"
local time_since_last_update=$((current_time - ${!last_update_var}))
if [[ $time_since_last_update -ge ${FORCE_UPDATE_INTERVAL:-1800} ]]; then
if [[ ${time_since_last_update} -ge ${FORCE_UPDATE_INTERVAL:-1800} ]]; then
changes_detected=true
local minutes=$((${FORCE_UPDATE_INTERVAL:-1800} / 60))
if [[ "$type" == "lxc" ]]; then
update_lxc=true
log_info "Scheduled LXC update (every $((FORCE_UPDATE_INTERVAL / 60)) minutes)"
log_info "Scheduled LXC update (every ${minutes} minutes)"
else
update_vm=true
log_info "Scheduled VM update (every $((FORCE_UPDATE_INTERVAL / 60)) minutes)"
log_info "Scheduled VM update (every ${minutes} minutes)"
fi
update_all_tags "$type"
eval "${last_update_var}=${current_time}"
fi
done
# Final execution
$update_lxc && update_all_tags "lxc"
$update_vm && update_all_tags "vm"
}
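The force-update loop above reads the per-type timestamp through indirect expansion (${!last_update_var}), and one side of the diff writes it back with eval; as a side note, bash can do the write without eval via printf -v. A self-contained sketch of both directions:

# Sketch: read and update a dynamically named timestamp variable
last_update_lxc_time=0
type="lxc"
var="last_update_${type}_time"
echo "previous: ${!var}"              # indirect read, as used in the diff
printf -v "$var" '%s' "$(date +%s)"   # indirect write without eval
echo "updated:  ${!var}"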
# Initialize time variables
@ -889,19 +872,9 @@ main() {
echo -e "${BLUE}${NC} Tag format: ${WHITE}${TAG_FORMAT:-$DEFAULT_TAG_FORMAT}${NC}"
echo -e "${BLUE}${NC} Allowed CIDRs: ${WHITE}${CIDR_LIST[*]}${NC}"
echo -e "${PURPLE}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}\n"
if [[ "$FORCE_SINGLE_RUN" == "true" ]]; then
check
exit 0
fi
while true; do
check
sleep "${LOOP_INTERVAL:-300}"
done
check
}
# Cache cleanup function
cleanup_vm_cache() {
local cache_dir="/tmp"
@ -1024,7 +997,7 @@ process_vms_parallel() {
# Parallel LXC processing function
process_lxc_parallel() {
local lxc_list=("$@")
local max_parallel=${MAX_PARALLEL_LXC_CHECKS:-2}
local max_parallel=${MAX_PARALLEL_LXC_CHECKS:-7}
local batch_size=${LXC_BATCH_SIZE:-20}
local job_count=0
local pids=()
@ -1200,7 +1173,7 @@ get_lxc_ips() {
fi
# Fallback: always do lxc-attach/pct exec with timeout if nothing found
if [[ -z "$ips" && "${LXC_ALLOW_FORCED_COMMANDS:-true}" == "true" ]]; then
if [[ -z "$ips" ]]; then
debug_log "lxc $vmid: trying fallback lxc-attach (forced)"
local attach_ip=""
attach_ip=$(timeout 7s lxc-attach -n "$vmid" -- ip -4 addr show 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v '127.0.0.1' | head -1)
@ -1215,7 +1188,7 @@ get_lxc_ips() {
method_used="lxc_attach_forced"
fi
fi
if [[ -z "$ips" && "${LXC_ALLOW_FORCED_COMMANDS:-true}" == "true" ]]; then
if [[ -z "$ips" ]]; then
debug_log "lxc $vmid: trying fallback pct exec (forced)"
local pct_ip=""
pct_ip=$(timeout 7s pct exec "$vmid" -- ip -4 addr show 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v '127.0.0.1' | head -1)
@ -1337,21 +1310,6 @@ msg_info "Restarting Service with optimizations"
systemctl restart iptag.service &>/dev/null
msg_ok "Service restarted with CPU optimizations"
msg_info "Creating manual run command"
cat <<'EOF' >/usr/local/bin/iptag-run
#!/usr/bin/env bash
CONFIG_FILE="/opt/iptag/iptag.conf"
SCRIPT_FILE="/opt/iptag/iptag"
if [[ ! -f "$SCRIPT_FILE" ]]; then
echo "❌ Main script not found: $SCRIPT_FILE"
exit 1
fi
export FORCE_SINGLE_RUN=true
exec "$SCRIPT_FILE"
EOF
chmod +x /usr/local/bin/iptag-run
msg_ok "Created iptag-run executable - You can execute this manually by entering “iptag-run” in the Proxmox host, so the script is executed by hand."
SPINNER_PID=""
echo -e "\n${APP} installation completed successfully! ${CL}\n"