Mirror of https://github.com/community-scripts/ProxmoxVE.git, synced 2025-07-08 06:57:37 +00:00

Compare commits: 94 commits (CrazyWolf1 ... CrazyWolf1)
Commits (SHA1):

5cde6e772b, 16786a8304, fc728bcca7, 5516aa493d, 54f2f98193, 137a41f67c, 26365561dc, 9dc0fc80b9,
722f7e14d7, b2a8a9bd00, 12bd6754ab, 74166f97f9, b2bccd9501, c567b75aa7, 2d77790b64, 37d466103d,
d59aa0527a, 9ddf10f82e, 69d2835c73, 59f99a27d2, 372b52f64d, 2586c9f385, 5c5d5d52ce, c2a7e990bd,
3847442ca5, 6996111473, 3336f6a6f5, be6a63cd03, 160846e98b, e63128625e, e58ad9237a, 2ce64b5004,
9893379eef, 9ae95d1eb5, 8a178b6f45, f0b645c894, 2be5d83a6d, fafb7501c5, 4bac3063da, 29d3015314,
4d2fcb2c66, ca6b1a95dd, 14cb9f097d, 4d4726ccbe, 3ce5893fe3, 993b01fa83, 5c4abb6d1d, 06ec9593da,
ace106051b, e2396f6667, 067b3c2f02, 73f1c261ab, 2cbf0f744f, a6517f4d7e, e492a9b5d9, 5e7eb4ae3a,
1a8a34b533, c38b4c2ef3, 679699e6af, 32a724ac8a, 798a7b323e, f1eda9a270, 5eaee5054f, e140631760,
ae24b56c61, cb7d58b9b0, 115b21f729, 9072459066, c2f9737435, e5bea1f49a, f7e9fbc473, 66a5730288,
1cefb1b842, 16ac51b551, 28ed2da95f, 0c98308b60, 9128b9dd12, 301a23e5f4, 2b848ff1d8, 3d69931675,
3e504cf48f, d2cbfcd69a, ebfb6a4e34, 7403470bd7, af9475d280, 8882a17b6f, 32d6194ade, 569089cb73,
f494e68016, 7561e26c0a, a95be13c95, 9ab50d4248, f5be1d270a, 524a2a422d
.github/autolabeler-config.json (generated, vendored): 27 changed lines

@@ -121,5 +121,32 @@
 ],
 "excludeGlobs": []
 }
+],
+"addon": [
+{
+"fileStatus": null,
+"includeGlobs": [
+"tools/addon/**"
+],
+"excludeGlobs": []
+}
+],
+"pve-tool": [
+{
+"fileStatus": null,
+"includeGlobs": [
+"tools/pve/**"
+],
+"excludeGlobs": []
+}
+],
+"vm": [
+{
+"fileStatus": null,
+"includeGlobs": [
+"vm/**"
+],
+"excludeGlobs": []
+}
 ]
 }
.github/workflows/autolabeler.yml (generated, vendored): 36 changed lines

@@ -19,7 +19,7 @@ jobs:

 - name: Install dependencies
 run: npm install minimatch

 - name: Label PR based on file changes and PR template
 uses: actions/github-script@v7
 with:

@@ -43,51 +43,51 @@ jobs:
 pull_number: prNumber,
 });
 const prFiles = prListFilesResponse.data;

-// Apply labels based on file changes
 for (const [label, rules] of Object.entries(autolabelerConfig)) {
 const shouldAddLabel = prFiles.some((prFile) => {
 return rules.some((rule) => {
 const isFileStatusMatch = rule.fileStatus ? rule.fileStatus === prFile.status : true;
 const isIncludeGlobMatch = rule.includeGlobs.some((glob) => minimatch(prFile.filename, glob));
 const isExcludeGlobMatch = rule.excludeGlobs.some((glob) => minimatch(prFile.filename, glob));

 return isFileStatusMatch && isIncludeGlobMatch && !isExcludeGlobMatch;
 });
 });

 if (shouldAddLabel) {
 labelsToAdd.add(label);
+if (label === "update script") {
+for (const prFile of prFiles) {
+const filename = prFile.filename;
+if (filename.startsWith("vm/")) labelsToAdd.add("vm");
+if (filename.startsWith("tools/addon/")) labelsToAdd.add("addon");
+if (filename.startsWith("tools/pve/")) labelsToAdd.add("pve-tool");
+}
+}
 }
 }
-//if two labels or more are added, return
 if (labelsToAdd.size < 2) {
 const templateLabelMappings = {
 "🐞 **Bug fix**": "bugfix",
 "✨ **New feature**": "feature",
 "💥 **Breaking change**": "breaking change",
+"🆕 **New script**": "new script",
+"🌍 **Website update**": "website",
 "🔧 **Refactoring / Code Cleanup**": "refactor",
+"📝 **Documentation update**": "documentation"
 };

 for (const [checkbox, label] of Object.entries(templateLabelMappings)) {
-const escapedCheckbox = checkbox.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, "\\$1");
-const regex = new RegExp(`- \\[(x|X)\\]\\s*.*${escapedCheckbox}`, "i");
-const match = prBody.match(regex);
-if (match) {
-console.log(`Match: ${match}`);
+const escapedCheckbox = checkbox.replace(/([.*+?^=!:${}()|[\]\/\\])/g, "\\$1");
+const regex = new RegExp(`- \[(x|X)\]\s*.*${escapedCheckbox}`, "i");
+if (regex.test(prBody)) {
 labelsToAdd.add(label);
 }
 }
 }

-console.log(`Labels to add: ${Array.from(labelsToAdd).join(", ")}`);

 if (labelsToAdd.size > 0) {
-console.log(`Adding labels ${Array.from(labelsToAdd).join(", ")} to PR ${prNumber}`);
 await github.rest.issues.addLabels({
 owner: context.repo.owner,
 repo: context.repo.repo,
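Taken together, the two autolabeler changes above add path-based labels (`vm`, `addon`, `pve-tool`) alongside the existing `update script` label. The workflow itself does this in JavaScript with minimatch; the snippet below is only a minimal Bash sketch of the same path-to-label mapping, using a hypothetical file path, to make the new rule set easier to read.

```bash
#!/usr/bin/env bash
# Minimal sketch of the mapping added above (the real autolabeler runs as
# JavaScript via actions/github-script and minimatch, not as shell).
labels_for_path() {
  local path="$1"
  case "$path" in
    vm/*)          echo "vm" ;;
    tools/addon/*) echo "addon" ;;
    tools/pve/*)   echo "pve-tool" ;;
    *)             ;; # no extra label for other paths
  esac
}

# Hypothetical changed file, for illustration only:
labels_for_path "tools/pve/example-tool.sh"   # prints: pve-tool
```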
CHANGELOG.md: 75 changed lines

@@ -10,9 +10,70 @@
 > [!CAUTION]
 Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit the project's popularity for potentially malicious purposes.

-> [!NOTE]
-All LXC instances created using this repository come pre-installed with Midnight Commander, which is a command-line tool (`mc`) that offers a user-friendly file and directory management interface for the terminal environment.
+## 2025-07-07
+
+### 🚀 Updated Scripts
+
+- Fix unbound var in pulse.sh [@michelroegl-brunner](https://github.com/michelroegl-brunner) ([#5807](https://github.com/community-scripts/ProxmoxVE/pull/5807))
+- Refactor: Docmost [@tremor021](https://github.com/tremor021) ([#5806](https://github.com/community-scripts/ProxmoxVE/pull/5806))
+- Fix/stirling pdf script [@JcMinarro](https://github.com/JcMinarro) ([#5803](https://github.com/community-scripts/ProxmoxVE/pull/5803))
+- gitea-mirror: update repo-url [@CrazyWolf13](https://github.com/CrazyWolf13) ([#5794](https://github.com/community-scripts/ProxmoxVE/pull/5794))
+
+- #### 🐞 Bug Fixes
+
+- Bookstack: Fix PHP Issue & Bump to PHP 8.3 [@MickLesk](https://github.com/MickLesk) ([#5779](https://github.com/community-scripts/ProxmoxVE/pull/5779))
+
+- #### ✨ New Features
+
+- Refactor: Threadfin (+ updatable) [@MickLesk](https://github.com/MickLesk) ([#5783](https://github.com/community-scripts/ProxmoxVE/pull/5783))
+- tools.func: better handling when unpacking tarfiles in prebuild mode [@MickLesk](https://github.com/MickLesk) ([#5781](https://github.com/community-scripts/ProxmoxVE/pull/5781))
+- tools.func: add AVX check for MongoDB [@MickLesk](https://github.com/MickLesk) ([#5780](https://github.com/community-scripts/ProxmoxVE/pull/5780))
+
+- #### 🔧 Refactor
+
+- Refactor: Baby Buddy [@tremor021](https://github.com/tremor021) ([#5769](https://github.com/community-scripts/ProxmoxVE/pull/5769))
+- Refactor: Changed the way we install BunkerWeb by leveraging the brand new install-bunkerweb.sh [@TheophileDiot](https://github.com/TheophileDiot) ([#5707](https://github.com/community-scripts/ProxmoxVE/pull/5707))
+
+### 🌐 Website
+
+- #### 📝 Script Information
+
+- PBS: add hint for advanced installs [@MickLesk](https://github.com/MickLesk) ([#5788](https://github.com/community-scripts/ProxmoxVE/pull/5788))
+- EMQX: Add warning to website [@tremor021](https://github.com/tremor021) ([#5770](https://github.com/community-scripts/ProxmoxVE/pull/5770))
+
+## 2025-07-06
+
+### 🚀 Updated Scripts
+
+- Refactor: Barcodebuddy [@tremor021](https://github.com/tremor021) ([#5735](https://github.com/community-scripts/ProxmoxVE/pull/5735))
+
+- #### 🐞 Bug Fixes
+
+- Fix update script for Mafl: ensure directory is removed recursively [@jonalbr](https://github.com/jonalbr) ([#5759](https://github.com/community-scripts/ProxmoxVE/pull/5759))
+- BookStack: Typo fix [@tremor021](https://github.com/tremor021) ([#5746](https://github.com/community-scripts/ProxmoxVE/pull/5746))
+- Resolves incorrect URL at end of Pocket ID script [@johnsturgeon](https://github.com/johnsturgeon) ([#5743](https://github.com/community-scripts/ProxmoxVE/pull/5743))
+
+- #### ✨ New Features
+
+- [Feature] Add option to expose Docker via TCP port (alpine docker) [@oformaniuk](https://github.com/oformaniuk) ([#5716](https://github.com/community-scripts/ProxmoxVE/pull/5716))
+
+- #### 🔧 Refactor
+
+- Refactor: Bitmagnet [@tremor021](https://github.com/tremor021) ([#5733](https://github.com/community-scripts/ProxmoxVE/pull/5733))
+- Refactor: Baikal [@tremor021](https://github.com/tremor021) ([#5736](https://github.com/community-scripts/ProxmoxVE/pull/5736))
+
+## 2025-07-05
+
+### 🚀 Updated Scripts
+
+- #### 🔧 Refactor
+
+- Refactor: BookStack [@tremor021](https://github.com/tremor021) ([#5732](https://github.com/community-scripts/ProxmoxVE/pull/5732))
+- Refactor: Authelia [@tremor021](https://github.com/tremor021) ([#5722](https://github.com/community-scripts/ProxmoxVE/pull/5722))
+- Refactor: Dashy [@tremor021](https://github.com/tremor021) ([#5723](https://github.com/community-scripts/ProxmoxVE/pull/5723))
+- Refactor: CryptPad [@tremor021](https://github.com/tremor021) ([#5724](https://github.com/community-scripts/ProxmoxVE/pull/5724))
+- Refactor: ByteStash [@tremor021](https://github.com/tremor021) ([#5725](https://github.com/community-scripts/ProxmoxVE/pull/5725))
+- Refactor: AgentDVR [@tremor021](https://github.com/tremor021) ([#5726](https://github.com/community-scripts/ProxmoxVE/pull/5726))
+
 ## 2025-07-04

@@ -20,12 +81,20 @@ All LXC instances created using this repository come pre-installed with Midnight

 - #### 🐞 Bug Fixes

+- Refactor: Mafl [@tremor021](https://github.com/tremor021) ([#5702](https://github.com/community-scripts/ProxmoxVE/pull/5702))
 - Outline: Fix sed command for v0.85.0 [@tremor021](https://github.com/tremor021) ([#5688](https://github.com/community-scripts/ProxmoxVE/pull/5688))
 - Komodo: Update Script to use FerretDB / remove psql & sqlite options [@MickLesk](https://github.com/MickLesk) ([#5690](https://github.com/community-scripts/ProxmoxVE/pull/5690))
 - ESPHome: Fix Linking issue to prevent version mismatch [@MickLesk](https://github.com/MickLesk) ([#5685](https://github.com/community-scripts/ProxmoxVE/pull/5685))
-- Update Iptag [@DesertGamer](https://github.com/DesertGamer) ([#5677](https://github.com/community-scripts/ProxmoxVE/pull/5677))
 - Cloudflare-DDNS: fix unvisible read command at install [@MickLesk](https://github.com/MickLesk) ([#5682](https://github.com/community-scripts/ProxmoxVE/pull/5682))

+- #### ✨ New Features
+
+- Core layer refactor: centralized error traps and msg_* consistency [@MickLesk](https://github.com/MickLesk) ([#5705](https://github.com/community-scripts/ProxmoxVE/pull/5705))
+
+- #### 💥 Breaking Changes
+
+- Update Iptag [@DesertGamer](https://github.com/DesertGamer) ([#5677](https://github.com/community-scripts/ProxmoxVE/pull/5677))
+
 ### 🌐 Website

 - #### 📝 Script Information
README.md

@@ -1,7 +1,7 @@
 <div align="center">
 <p align="center">
 <a href="#">
-<img src="https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/images/logo.png" height="100px" />
+<img src="https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/images/logo-81x112.png" height="100px" />
 </a>
 </p>
 </div>
ct/authelia.sh

@@ -22,30 +22,30 @@ color
 catch_errors

 function update_script() {
 header_info
 check_container_storage
 check_container_resources
 if [[ ! -d "/etc/authelia/" ]]; then
 msg_error "No ${APP} Installation Found!"
-exit
-fi
-RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-if [[ "${RELEASE}" != "$(/usr/bin/authelia -v | awk '{print substr($3, 2, length($2)) }')" ]]; then
-msg_info "Updating $APP to ${RELEASE}"
-$STD apt-get update
-$STD apt-get -y upgrade
-curl -fsSL "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_amd64.deb" -o $(basename "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_amd64.deb")
-$STD dpkg -i "authelia_${RELEASE}_amd64.deb"
-msg_info "Cleaning Up"
-rm -f "authelia_${RELEASE}_amd64.deb"
-$STD apt-get -y autoremove
-$STD apt-get -y autoclean
-msg_ok "Cleanup Completed"
-msg_ok "Updated $APP to ${RELEASE}"
-else
-msg_ok "No update required. ${APP} is already at ${RELEASE}"
-fi
 exit
+fi
+RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+if [[ "${RELEASE}" != "$(/usr/bin/authelia -v | awk '{print substr($3, 2, length($2)) }')" ]]; then
+$STD apt-get update
+$STD apt-get -y upgrade

+fetch_and_deploy_gh_release "authelia" "authelia/authelia" "binary"

+msg_info "Cleaning Up"
+$STD apt-get -y autoremove
+$STD apt-get -y autoclean
+msg_ok "Cleanup Completed"

+msg_ok "Updated $APP to ${RELEASE}"
+else
+msg_ok "No update required. ${APP} is already at ${RELEASE}"
+fi
+exit
 }

 start
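The `RELEASE=` lines above (old and new alike) resolve the latest upstream tag by scraping the GitHub releases API with grep and awk rather than a JSON parser. Below is a short sketch of what that pipeline does, using the same Authelia endpoint that appears in the diff.

```bash
#!/usr/bin/env bash
# The API response contains a line such as:   "tag_name": "v1.2.3",
# awk sees $2 = "v1.2.3",  (quotes and trailing comma included).
# substr($2, 2, length($2)-3) strips the surrounding quotes and the comma,
# leaving v1.2.3; the substr($2, 3, length($2)-4) variant used by other
# scripts in this diff also drops the leading "v".
RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest |
  grep "tag_name" |
  awk '{print substr($2, 2, length($2)-3) }')
echo "Latest upstream tag: ${RELEASE}"
```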
ct/babybuddy.sh

@@ -29,7 +29,7 @@ function update_script() {
 fi

 RELEASE=$(curl -fsSL https://api.github.com/repos/babybuddy/babybuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/babybuddy_version.txt)" ]]; then
+if [[ "${RELEASE}" != "$(cat ~/.babybuddy 2>/dev/null)" ]] || [[ ! -f ~/.babybuddy ]]; then
 setup_uv

 msg_info "Stopping Services"

@@ -42,17 +42,14 @@ function update_script() {
 find . -mindepth 1 -maxdepth 1 ! -name '.venv' -exec rm -rf {} +
 msg_ok "Cleaned old files"

+fetch_and_deploy_gh_release "babybuddy" "babybuddy/babybuddy"

 msg_info "Updating ${APP} to v${RELEASE}"
-temp_file=$(mktemp)
-curl -fsSL "https://github.com/babybuddy/babybuddy/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
 cd /opt/babybuddy
-tar zxf "$temp_file" --strip-components=1 -C /opt/babybuddy
 mv /tmp/production.py.bak babybuddy/settings/production.py
-cd /opt/babybuddy
 source .venv/bin/activate
 $STD uv pip install -r requirements.txt
 $STD python manage.py migrate
-echo "${RELEASE}" >/opt/${APP}_version.txt
 msg_ok "Updated ${APP} to v${RELEASE}"

 msg_info "Fixing permissions"

@@ -66,9 +63,6 @@ function update_script() {
 systemctl start nginx
 msg_ok "Services Started"

-msg_info "Cleaning up"
-rm -f "$temp_file"
-msg_ok "Cleaned"
 msg_ok "Updated Successfully"
 else
 msg_ok "No update required. ${APP} is already at v${RELEASE}"
ct/baikal.sh: 17 changed lines

@@ -23,34 +23,35 @@ function update_script() {
 header_info
 check_container_storage
 check_container_resources

 if [[ ! -d /opt/baikal ]]; then
 msg_error "No ${APP} Installation Found!"
 exit
 fi
 RELEASE=$(curl -fsSL https://api.github.com/repos/sabre-io/Baikal/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+if [[ "${RELEASE}" != "$(cat ~/.baikal 2>/dev/null)" ]] || [[ ! -f ~/.baikal ]]; then
 msg_info "Stopping Service"
 systemctl stop apache2
 msg_ok "Stopped Service"

-msg_info "Updating ${APP} to v${RELEASE}"
-cd /opt
-curl -fsSL "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip" -o $(basename "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip")
+msg_info "Backing up data"
 mv /opt/baikal /opt/baikal-backup
-$STD unzip -o "baikal-${RELEASE}.zip"
+msg_ok "Backed up data"
+
+fetch_and_deploy_gh_release "baikal" "sabre-io/Baikal"
+
+msg_info "Configuring Baikal"
 cp -r /opt/baikal-backup/config/baikal.yaml /opt/baikal/config/
 cp -r /opt/baikal-backup/Specific/ /opt/baikal/
 chown -R www-data:www-data /opt/baikal/
 chmod -R 755 /opt/baikal/
-echo "${RELEASE}" >/opt/${APP}_version.txt
-msg_ok "Updated $APP to v${RELEASE}"
+msg_ok "Configured Baikal"

 msg_info "Starting Service"
 systemctl start apache2
 msg_ok "Started Service"

 msg_info "Cleaning up"
-rm -rf "/opt/baikal-${RELEASE}.zip"
 rm -rf /opt/baikal-backup
 msg_ok "Cleaned"
 msg_ok "Updated Successfully"
ct/barcodebuddy.sh

@@ -23,27 +23,28 @@ function update_script() {
 header_info
 check_container_storage
 check_container_resources

 if [[ ! -d /opt/barcodebuddy ]]; then
 msg_error "No ${APP} Installation Found!"
 exit
 fi
 RELEASE=$(curl -fsSL https://api.github.com/repos/Forceu/barcodebuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+if [[ "${RELEASE}" != "$(cat ~/.barcodebuddy 2>/dev/null)" ]] || [[ ! -f ~/.barcodebuddy ]]; then
 msg_info "Stopping Service"
 systemctl stop apache2
 systemctl stop barcodebuddy
 msg_ok "Stopped Service"

-msg_info "Updating ${APP} to v${RELEASE}"
-cd /opt
+msg_info "Backing up data"
 mv /opt/barcodebuddy/ /opt/barcodebuddy-backup
-curl -fsSL "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip" -o $(basename "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip")
-$STD unzip "v${RELEASE}.zip"
-mv "/opt/barcodebuddy-${RELEASE}" /opt/barcodebuddy
+msg_ok "Backed up data"
+
+fetch_and_deploy_gh_release "barcodebuddy" "Forceu/barcodebuddy"
+
+msg_info "Configuring ${APP}"
 cp -r /opt/barcodebuddy-backup/data/. /opt/barcodebuddy/data
 chown -R www-data:www-data /opt/barcodebuddy/data
-echo "${RELEASE}" >/opt/${APP}_version.txt
-msg_ok "Updated $APP to v${RELEASE}"
+msg_ok "Configured ${APP}"

 msg_info "Starting Service"
 systemctl start apache2

@@ -51,7 +52,6 @@ function update_script() {
 msg_ok "Started Service"

 msg_info "Cleaning up"
-rm -r "/opt/v${RELEASE}.zip"
 rm -r /opt/barcodebuddy-backup
 msg_ok "Cleaned"
 msg_ok "Updated Successfully"
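A pattern repeated in the refactored scripts above: the installed version is no longer tracked in `/opt/<APP>_version.txt` but compared against a `~/.<app>` marker file (for example `~/.barcodebuddy`), and the manual download/unzip steps are replaced by the repo's `fetch_and_deploy_gh_release` helper. The sketch below shows just that gate; it assumes, and this is not shown in the diff, that the helper is what keeps the marker file current, so the sketch writes the marker itself to stay self-contained.

```bash
#!/usr/bin/env bash
# Sketch of the new update gate; writing the marker file ourselves is an
# assumption standing in for whatever fetch_and_deploy_gh_release does.
APP=barcodebuddy
RELEASE=$(curl -fsSL https://api.github.com/repos/Forceu/barcodebuddy/releases/latest |
  grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')

if [[ "${RELEASE}" != "$(cat ~/."${APP}" 2>/dev/null)" ]] || [[ ! -f ~/."${APP}" ]]; then
  echo "Update available: ${RELEASE}"
  # ...stop services, back up data, deploy the new release here...
  echo "${RELEASE}" >~/."${APP}"
else
  echo "No update required. ${APP} is already at ${RELEASE}"
fi
```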
ct/bitmagnet.sh

@@ -28,12 +28,12 @@ function update_script() {
 exit
 fi
 RELEASE=$(curl -fsSL https://api.github.com/repos/bitmagnet-io/bitmagnet/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+if [[ "${RELEASE}" != "$(cat ~/.bitmagnet 2>/dev/null)" ]] || [[ ! -f ~/.bitmagnet ]]; then
 msg_info "Stopping Service"
 systemctl stop bitmagnet-web
 msg_ok "Stopped Service"

-msg_info "Backing up database"
+msg_info "Backing up data"
 rm -f /tmp/backup.sql
 $STD sudo -u postgres pg_dump \
 --column-inserts \

@@ -56,31 +56,26 @@ function update_script() {
 bitmagnet \
 >/tmp/backup.sql
 mv /tmp/backup.sql /opt/
-msg_ok "Database backed up"
-
-msg_info "Updating ${APP} to v${RELEASE}"
 [ -f /opt/bitmagnet/.env ] && cp /opt/bitmagnet/.env /opt/
 [ -f /opt/bitmagnet/config.yml ] && cp /opt/bitmagnet/config.yml /opt/
-rm -rf /opt/bitmagnet/*
-temp_file=$(mktemp)
-curl -fsSL "https://github.com/bitmagnet-io/bitmagnet/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf "$temp_file" --strip-components=1 -C /opt/bitmagnet
+msg_ok "Data backed up"
+
+rm -rf /opt/bitmagnet
+fetch_and_deploy_gh_release "bitmagnet" "bitmagnet-io/bitmagnet"
+
+msg_info "Updating ${APP} to v${RELEASE}"
 cd /opt/bitmagnet
 VREL=v$RELEASE
 $STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
 chmod +x bitmagnet
 [ -f "/opt/.env" ] && cp "/opt/.env" /opt/bitmagnet/
 [ -f "/opt/config.yml" ] && cp "/opt/config.yml" /opt/bitmagnet/
-echo "${RELEASE}" >/opt/${APP}_version.txt
 msg_ok "Updated $APP to v${RELEASE}"

 msg_info "Starting Service"
 systemctl start bitmagnet-web
 msg_ok "Started Service"

-msg_info "Cleaning up"
-rm -f "$temp_file"
-msg_ok "Cleaned"
 msg_ok "Updated Successfully"
 else
 msg_ok "No update required. ${APP} is already at v${RELEASE}"
ct/bookstack.sh

@@ -23,25 +23,32 @@ function update_script() {
 header_info
 check_container_storage
 check_container_resources

 if [[ ! -d /opt/bookstack ]]; then
 msg_error "No ${APP} Installation Found!"
 exit
 fi
 RELEASE=$(curl -fsSL https://api.github.com/repos/BookStackApp/BookStack/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+if [[ "${RELEASE}" != "$(cat ~/.bookstack 2>/dev/null)" ]] || [[ ! -f ~/.bookstack ]]; then
 msg_info "Stopping Apache2"
 systemctl stop apache2
 msg_ok "Services Stopped"

-msg_info "Updating ${APP} to v${RELEASE}"
+msg_info "Backing up data"
 mv /opt/bookstack /opt/bookstack-backup
-curl -fsSL "https://github.com/BookStackApp/BookStack/archive/refs/tags/v${RELEASE}.zip" -o "/opt/BookStack-${RELEASE}.zip"
-$STD unzip "/opt/BookStack-${RELEASE}.zip" -d /opt
-mv "/opt/BookStack-${RELEASE}" /opt/bookstack
+msg_ok "Backup finished"
+
+fetch_and_deploy_gh_release "bookstack" "BookStackApp/BookStack"
+PHP_MODULE="ldap,tidy,bz2,mysqli" PHP_FPM="YES" PHP_APACHE="YES" PHP_VERSION="8.3" setup_php
+
+msg_info "Restoring backup"
 cp /opt/bookstack-backup/.env /opt/bookstack/.env
 [[ -d /opt/bookstack-backup/public/uploads ]] && cp -a /opt/bookstack-backup/public/uploads/. /opt/bookstack/public/uploads/
 [[ -d /opt/bookstack-backup/storage/uploads ]] && cp -a /opt/bookstack-backup/storage/uploads/. /opt/bookstack/storage/uploads/
 [[ -d /opt/bookstack-backup/themes ]] && cp -a /opt/bookstack-backup/themes/. /opt/bookstack/themes/
+msg_ok "Backup restored"
+
+msg_info "Configuring BookStack"
 cd /opt/bookstack
 export COMPOSER_ALLOW_SUPERUSER=1
 $STD composer install --no-dev

@@ -51,7 +58,7 @@ function update_script() {
 chmod -R 775 /opt/bookstack/storage /opt/bookstack/bootstrap/cache /opt/bookstack/public/uploads
 chmod -R 640 /opt/bookstack/.env
 echo "${RELEASE}" >/opt/${APP}_version.txt
-msg_ok "Updated ${APP} to v${RELEASE}"
+msg_ok "Configured BookStack"

 msg_info "Starting Apache2"
 systemctl start apache2

@@ -59,7 +66,6 @@ function update_script() {

 msg_info "Cleaning Up"
 rm -rf /opt/bookstack-backup
-rm -rf "/opt/BookStack-${RELEASE}.zip"
 msg_ok "Cleaned"
 msg_ok "Updated Successfully"
 else
ct/bunkerweb.sh

@@ -37,8 +37,8 @@ Pin: version ${RELEASE}
 Pin-Priority: 1001
 EOF
 apt-get update
-apt-get install -y nginx=1.26.3*
-apt-get install -y bunkerweb=${RELEASE}
+apt-mark unhold bunkerweb nginx
+apt-get install -y --allow-downgrades bunkerweb=${RELEASE}
 echo "${RELEASE}" >/opt/${APP}_version.txt
 msg_ok "Updated ${APP} to ${RELEASE}"
ct/bytestash.sh

@@ -20,47 +20,47 @@ color
 catch_errors

 function update_script() {
 header_info
 check_container_storage
 check_container_resources
-if [[ ! -d /opt/bytestash ]]; then
-msg_error "No ${APP} Installation Found!"
-exit
-fi
-RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
-msg_info "Stopping Services"
-systemctl stop bytestash-backend
-systemctl stop bytestash-frontend
-msg_ok "Services Stopped"

-msg_info "Updating ${APP} to ${RELEASE}"
-temp_file=$(mktemp)
-curl -fsSL "https://github.com/jordan-dalby/ByteStash/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf $temp_file
-rm -rf /opt/bytestash/server/node_modules
-rm -rf /opt/bytestash/client/node_modules
-cp -rf ByteStash-${RELEASE}/* /opt/bytestash
-cd /opt/bytestash/server
-$STD npm install
-cd /opt/bytestash/client
-$STD npm install
-echo "${RELEASE}" >/opt/${APP}_version.txt
-msg_ok "Updated ${APP}"

-msg_info "Starting Services"
-systemctl start bytestash-backend
-systemctl start bytestash-frontend
-msg_ok "Started Services"

-msg_info "Cleaning Up"
-rm -f $temp_file
-msg_ok "Cleaned"
-msg_ok "Updated Successfully"
-else
-msg_ok "No update required. ${APP} is already at ${RELEASE}"
-fi
+if [[ ! -d /opt/bytestash ]]; then
+msg_error "No ${APP} Installation Found!"
 exit
+fi
+RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
+if [[ "${RELEASE}" != "$(cat ~/.bytestash 2>/dev/null)" ]] || [[ ! -f ~/.bytestash ]]; then
+
+read -rp "${TAB3}Did you make a backup via application WebUI? (y/n): " backuped
+if [[ "$backuped" =~ ^[Yy]$ ]]; then
+msg_info "Stopping Services"
+systemctl stop bytestash-backend
+systemctl stop bytestash-frontend
+msg_ok "Services Stopped"
+
+rm -rf /opt/bytestash
+fetch_and_deploy_gh_release "bytestash" "jordan-dalby/ByteStash"
+
+msg_info "Configuring ByteStash"
+cd /opt/bytestash/server
+$STD npm install
+cd /opt/bytestash/client
+$STD npm install
+msg_ok "Updated ${APP}"
+
+msg_info "Starting Services"
+systemctl start bytestash-backend
+systemctl start bytestash-frontend
+msg_ok "Started Services"
+else
+msg_error "PLEASE MAKE A BACKUP FIRST!"
+exit
+fi
+msg_ok "Updated Successfully"
+else
+msg_ok "No update required. ${APP} is already at ${RELEASE}"
+fi
+exit
 }

 start

@@ -70,4 +70,4 @@ description
 msg_ok "Completed Successfully!\n"
 echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
 echo -e "${INFO}${YW} Access it using the following URL:${CL}"
 echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
ct/cryptpad.sh

@@ -20,48 +20,46 @@ color
 catch_errors

 function update_script() {
 header_info
 check_container_storage
 check_container_resources

 if [[ ! -d "/opt/cryptpad" ]]; then
 msg_error "No ${APP} Installation Found!"
-exit
-fi
-RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
-msg_info "Stopping $APP"
-systemctl stop cryptpad
-msg_ok "Stopped $APP"

-msg_info "Updating $APP to ${RELEASE}"
-temp_dir=$(mktemp -d)
-cp -f /opt/cryptpad/config/config.js /opt/config.js
-curl -fsSL "https://github.com/cryptpad/cryptpad/archive/refs/tags/${RELEASE}.tar.gz" -o "$temp_dir/cryptpad-${RELEASE}.tar.gz"
-cd "$temp_dir"
-tar zxf "cryptpad-${RELEASE}.tar.gz"
-cp -rf "cryptpad-${RELEASE}"/* /opt/cryptpad
-cd /opt/cryptpad
-$STD npm ci
-$STD npm run install:components
-$STD npm run build
-cp -f /opt/config.js /opt/cryptpad/config/config.js
-echo "${RELEASE}" >/opt/${APP}_version.txt
-msg_ok "Updated $APP to ${RELEASE}"

-msg_info "Cleaning Up"
-rm -rf $temp_dir
-msg_ok "Cleanup Completed"

-msg_info "Starting $APP"
-systemctl start cryptpad
-msg_ok "Started $APP"

-msg_ok "Update Successful"
-else
-msg_ok "No update required. ${APP} is already at ${RELEASE}"
-fi
 exit
+fi
+RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+if [[ "${RELEASE}" != "$(cat ~/.cryptpad 2>/dev/null)" ]] || [[ ! -f ~/.cryptpad ]]; then
+msg_info "Stopping $APP"
+systemctl stop cryptpad
+msg_ok "Stopped $APP"
+
+msg_info "Backing up configuration"
+[ -f /opt/cryptpad/config/config.js ] && mv /opt/cryptpad/config/config.js /opt/
+msg_ok "Backed up configuration"
+
+fetch_and_deploy_gh_release "cryptpad" "cryptpad/cryptpad"
+
+msg_info "Updating $APP to ${RELEASE}"
+cd /opt/cryptpad
+$STD npm ci
+$STD npm run install:components
+$STD npm run build
+msg_ok "Updated $APP to ${RELEASE}"
+
+msg_info "Restoring configuration"
+mv /opt/config.js /opt/cryptpad/config/
+msg_ok "Configuration restored"
+
+msg_info "Starting $APP"
+systemctl start cryptpad
+msg_ok "Started $APP"
+
+msg_ok "Update Successful"
+else
+msg_ok "No update required. ${APP} is already at ${RELEASE}"
+fi
+exit
 }

 start
ct/dashy.sh: 10 changed lines

@@ -29,7 +29,7 @@ function update_script() {
 fi

 RELEASE=$(curl -fsSL https://api.github.com/repos/Lissy93/dashy/releases/latest | grep '"tag_name":' | cut -d'"' -f4)
-if [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]] || [[ ! -f /opt/${APP}_version.txt ]]; then
+if [[ "${RELEASE}" != "$(cat ~/.dashy 2>/dev/null)" ]] || [[ ! -f ~/.dashy ]]; then
 msg_info "Stopping ${APP}"
 systemctl stop dashy
 msg_ok "Stopped ${APP}"

@@ -43,14 +43,13 @@ function update_script() {
 fi
 msg_ok "Backed up conf.yml"

-msg_info "Updating ${APP} to ${RELEASE}"
 rm -rf /opt/dashy
-mkdir -p /opt/dashy
-curl -fsSL "https://github.com/Lissy93/dashy/archive/refs/tags/${RELEASE}.tar.gz" | tar -xz -C /opt/dashy --strip-components=1
+fetch_and_deploy_gh_release "dashy" "Lissy93/dashy"
+
+msg_info "Updating ${APP} to ${RELEASE}"
 cd /opt/dashy
 npm install
 npm run build
-echo "${RELEASE}" >/opt/${APP}_version.txt
 msg_ok "Updated ${APP} to ${RELEASE}"

 msg_info "Restoring conf.yml"

@@ -65,6 +64,7 @@ function update_script() {
 msg_info "Starting Dashy"
 systemctl start dashy
 msg_ok "Started Dashy"

 msg_ok "Updated Successfully"
 else
 msg_ok "No update required. ${APP} is already at ${RELEASE}"
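For Dashy the helper replaces a manual download-and-extract step, and the removed lines above show exactly what that step was. The snippet below is that old procedure reassembled as a standalone sketch; the internals of `fetch_and_deploy_gh_release` are not part of this diff, so treat this as the manual equivalent rather than the helper's implementation.

```bash
#!/usr/bin/env bash
# Manual equivalent of what the removed ct/dashy.sh lines did before the
# fetch_and_deploy_gh_release refactor (reassembled from the old lines above).
RELEASE=$(curl -fsSL https://api.github.com/repos/Lissy93/dashy/releases/latest |
  grep '"tag_name":' | cut -d'"' -f4)

rm -rf /opt/dashy
mkdir -p /opt/dashy
# Download the release tarball and drop the top-level "dashy-<tag>" directory.
curl -fsSL "https://github.com/Lissy93/dashy/archive/refs/tags/${RELEASE}.tar.gz" |
  tar -xz -C /opt/dashy --strip-components=1
```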
ct/docmost.sh

@@ -27,48 +27,35 @@ function update_script() {
 exit
 fi
 if ! command -v node >/dev/null || [[ "$(/usr/bin/env node -v | grep -oP '^v\K[0-9]+')" != "22" ]]; then
-msg_info "Installing Node.js 22"
-$STD apt-get purge -y nodejs
-rm -f /etc/apt/sources.list.d/nodesource.list
-rm -f /etc/apt/keyrings/nodesource.gpg
-mkdir -p /etc/apt/keyrings
-curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
-echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_22.x nodistro main" >/etc/apt/sources.list.d/nodesource.list
-$STD apt-get update
-$STD apt-get install -y nodejs
-$STD npm install -g pnpm@10.4.0
-msg_ok "Node.js 22 installed"
+NODE_VERSION="22" NODE_MODULE="pnpm@$(curl -s https://raw.githubusercontent.com/docmost/docmost/main/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
 fi
 export NODE_OPTIONS="--max_old_space_size=4096"
 RELEASE=$(curl -fsSL https://api.github.com/repos/docmost/docmost/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-if [[ ! -f /opt/${APP}_version.txt ]] || [[ "${RELEASE}" != "$(cat /opt/${APP}_version.txt)" ]]; then
+if [[ "${RELEASE}" != "$(cat ~/.docmost 2>/dev/null)" ]] || [[ ! -f ~/.docmost ]]; then
 msg_info "Stopping ${APP}"
 systemctl stop docmost
 msg_ok "${APP} Stopped"

-msg_info "Updating ${APP} to v${RELEASE}"
+msg_info "Backing up data"
 cp /opt/docmost/.env /opt/
 cp -r /opt/docmost/data /opt/
 rm -rf /opt/docmost
-temp_file=$(mktemp)
-curl -fsSL "https://github.com/docmost/docmost/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar -xzf "$temp_file"
-mv docmost-${RELEASE} /opt/docmost
+msg_ok "Data backed up"
+
+fetch_and_deploy_gh_release "docmost" "docmost/docmost"
+
+msg_info "Updating ${APP} to v${RELEASE}"
 cd /opt/docmost
 mv /opt/.env /opt/docmost/.env
 mv /opt/data /opt/docmost/data
 $STD pnpm install --force
 $STD pnpm build
-echo "${RELEASE}" >/opt/${APP}_version.txt
 msg_ok "Updated ${APP}"

 msg_info "Starting ${APP}"
 systemctl start docmost
 msg_ok "Started ${APP}"

-msg_info "Cleaning Up"
-rm -f ${temp_file}
-msg_ok "Cleaned"
 msg_ok "Updated Successfully"
 else
 msg_ok "No update required. ${APP} is already at ${RELEASE}"
ct/gitea-mirror.sh

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
 # Copyright (c) 2021-2025 community-scripts ORG
 # Author: CrazyWolf13
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
-# Source: https://github.com/arunavo4/gitea-mirror
+# Source: https://github.com/RayLabsHQ/gitea-mirror

 APP="gitea-mirror"
 var_tags="${var_tags:-mirror;gitea}"

@@ -28,7 +28,7 @@ function update_script() {
 msg_error "No ${APP} Installation Found!"
 exit
 fi
-RELEASE=$(curl -fsSL https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
+RELEASE=$(curl -fsSL https://api.github.com/repos/RayLabsHQ/gitea-mirror/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
 if [[ "${RELEASE}" != "$(cat ~/.${APP} 2>/dev/null || cat /opt/${APP}_version.txt 2>/dev/null)" ]]; then

 msg_info "Stopping Services"

@@ -48,7 +48,7 @@ function update_script() {
 msg_ok "Installed Bun"

 rm -rf /opt/gitea-mirror
-fetch_and_deploy_gh_release "gitea-mirror" "arunavo4/gitea-mirror"
+fetch_and_deploy_gh_release "gitea-mirror" "RayLabsHQ/gitea-mirror"

 msg_info "Updating and rebuilding ${APP} to v${RELEASE}"
 cd /opt/gitea-mirror
@@ -1,6 +1,6 @@
-______ _ ___ _ __ __
-/_ __/____(_) (_)_ ______ ___ / | / /___ / /____ _____
-/ / / ___/ / / / / / / __ `__ \ / |/ / __ \/ __/ _ \/ ___/
-/ / / / / / / / /_/ / / / / / / / /| / /_/ / /_/ __(__ )
-/_/ /_/ /_/_/_/\__,_/_/ /_/ /_/ /_/ |_/\____/\__/\___/____/
+______ _ ___
+/_ __/____(_) (_)_ ______ ___
+/ / / ___/ / / / / / / __ `__ \
+/ / / / / / / / /_/ / / / / / /
+/_/ /_/ /_/_/_/\__,_/_/ /_/ /_/
ct/mafl.sh: 35 changed lines

@@ -27,18 +27,31 @@ function update_script() {
 msg_error "No ${APP} Installation Found!"
 exit
 fi

 RELEASE=$(curl -fsSL https://api.github.com/repos/hywax/mafl/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-msg_info "Updating Mafl to v${RELEASE} (Patience)"
-systemctl stop mafl
-curl -fsSL "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz" -o $(basename "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz")
-tar -xzf v${RELEASE}.tar.gz
-cp -r mafl-${RELEASE}/* /opt/mafl/
-rm -rf mafl-${RELEASE}
-cd /opt/mafl
-yarn install
-yarn build
-systemctl start mafl
-msg_ok "Updated Mafl to v${RELEASE}"
+if [[ "${RELEASE}" != "$(cat ~/.mafl 2>/dev/null)" ]] || [[ ! -f ~/.mafl ]]; then
+msg_info "Stopping Mafl service"
+systemctl stop mafl
+msg_ok "Service stopped"
+
+msg_info "Performing backup"
+mkdir -p /opt/mafl-backup/data
+mv /opt/mafl/data /opt/mafl-backup/data
+rm -rf /opt/mafl
+msg_ok "Backup complete"
+
+fetch_and_deploy_gh_release "mafl" "hywax/mafl"
+
+msg_info "Updating Mafl to v${RELEASE}"
+cd /opt/mafl
+yarn install
+yarn build
+mv /opt/mafl-backup/data /opt/mafl/data
+systemctl start mafl
+msg_ok "Updated Mafl to v${RELEASE}"
+else
+msg_ok "No update required. ${APP} is already at v${RELEASE}"
+fi
 exit
 }
ct/pocket-id.sh

@@ -86,4 +86,4 @@ msg_ok "Completed Successfully!\n"
 echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
 echo -e "${INFO}${YW} Configure your reverse proxy to point to:${BGN} ${IP}:1411${CL}"
 echo -e "${INFO}${YW} Access it using the following URL:${CL}"
-echo -e "${TAB}${GATEWAY}${BGN}https://{PUBLIC_URL}/login/setup${CL}"
+echo -e "${TAB}${GATEWAY}${BGN}https://{PUBLIC_URL}/setup${CL}"
@@ -58,6 +58,7 @@ function update_script() {
 else
 msg_ok "No update required. ${APP} is already at ${RELEASE}."
 fi
+exit
 }

 start
ct/stirling-pdf.sh

@@ -38,10 +38,12 @@ function update_script() {
 tar -xzf v$RELEASE.tar.gz
 cd Stirling-PDF-$RELEASE
 chmod +x ./gradlew
-$STD ./gradlew build
+$STD ./gradlew build -x spotlessApply -x spotlessCheck -x test -x sonarqube
 rm -rf /opt/Stirling-PDF/Stirling-PDF-*.jar
-cp -r ./build/libs/Stirling-PDF-*.jar /opt/Stirling-PDF/
+cp -r ./stirling-pdf/build/libs/*.jar /opt/Stirling-PDF/Stirling-PDF-$RELEASE.jar
 cp -r scripts /opt/Stirling-PDF/
+cp -r pipeline /opt/Stirling-PDF/
+cp -r stirling-pdf/src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto/
 cd ~
 rm -rf Stirling-PDF-$RELEASE v$RELEASE.tar.gz
 ln -sf /opt/Stirling-PDF/Stirling-PDF-$RELEASE.jar /opt/Stirling-PDF/Stirling-PDF.jar
ct/threadfin.sh

@@ -27,12 +27,24 @@ function update_script() {
 msg_error "No ${APP} Installation Found!"
 exit
 fi
-msg_info "Updating $APP"
-systemctl stop threadfin.service
-curl -fsSL "https://github.com/Threadfin/Threadfin/releases/latest/download/Threadfin_linux_amd64" -o "/opt/threadfin/threadfin"
-chmod +x /opt/threadfin/threadfin
-systemctl start threadfin.service
-msg_ok "Updated $APP"
+RELEASE=$(curl -fsSL https://api.github.com/repos/threadfin/threadfin/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
+if [[ "${RELEASE}" != "$(cat ~/.threadfin 2>/dev/null)" ]] || [[ ! -f ~/.threadfin ]]; then
+
+msg_info "Stopping $APP"
+systemctl stop threadfin
+msg_ok "Stopped $APP"
+
+fetch_and_deploy_gh_release "threadfin" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"
+
+msg_info "Starting $APP"
+systemctl start threadfin
+msg_ok "Started $APP"
+
+msg_ok "Updated Successfully"
+else
+msg_ok "No update required. ${APP} is already at v${RELEASE}"
+fi
 exit
 }
ct/trilium.sh

@@ -5,7 +5,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
 # Source: https://github.com/TriliumNext/Trilium

-APP="Trilium Notes"
+APP="Trilium"
 var_tags="${var_tags:-notes}"
 var_cpu="${var_cpu:-1}"
 var_ram="${var_ram:-512}"
frontend/public/json/add-iptag.json (generated): 6 changed lines

@@ -37,12 +37,16 @@
 "type": "info"
 },
 {
-"text": "Configuration: `nano /opt/iptag/iptag.conf`. iptag.service must be restarted after change.",
+"text": "Configuration: `nano /opt/iptag/iptag.conf`. iptag Service must be restarted after change. See here for full documentation: `https://github.com/community-scripts/ProxmoxVE/discussions/5790`",
 "type": "info"
 },
 {
 "text": "The Proxmox Node must contain ipcalc and net-tools. `apt-get install -y ipcalc net-tools`",
 "type": "warning"
+},
+{
+"text": "You can execute the ip tool manually with `iptag-run`",
+"type": "info"
 }
 ]
 }
frontend/public/json/bunkerweb.json (generated): 7 changed lines

@@ -31,5 +31,10 @@
 "username": null,
 "password": null
 },
-"notes": []
+"notes": [
+{
+"text": "WARNING: Installation sources scripts outside of Community Scripts repo. Please check the source before installing.",
+"type": "warning"
+}
+]
 }
frontend/public/json/dockge.json (generated): 2 changed lines

@@ -6,7 +6,7 @@
 ],
 "date_created": "2024-05-02",
 "type": "ct",
-"updateable": false,
+"updateable": true,
 "privileged": false,
 "interface_port": 5001,
 "documentation": null,
7
frontend/public/json/docmost.json
generated
7
frontend/public/json/docmost.json
generated
@ -31,5 +31,10 @@
|
|||||||
"username": null,
|
"username": null,
|
||||||
"password": null
|
"password": null
|
||||||
},
|
},
|
||||||
"notes": []
|
"notes": [
|
||||||
|
{
|
||||||
|
"text": "Use `cat ~/docmost.creds` to see database credentials.",
|
||||||
|
"type": "info"
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
4
frontend/public/json/emqx.json
generated
4
frontend/public/json/emqx.json
generated
@ -35,6 +35,10 @@
|
|||||||
{
|
{
|
||||||
"text": "Setup-Steps: Access Control ➡ Authentication ➡ Create ➡ Next ➡ Next ➡ Create ➡ Users ➡ Add ➡ Username / Password (to authenicate with MQTT) ➡ Save. You're now ready to enjoy a high-performance MQTT Broker.",
|
"text": "Setup-Steps: Access Control ➡ Authentication ➡ Create ➡ Next ➡ Next ➡ Create ➡ Users ➡ Add ➡ Username / Password (to authenicate with MQTT) ➡ Save. You're now ready to enjoy a high-performance MQTT Broker.",
|
||||||
"type": "info"
|
"type": "info"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"text": "WARNING: Installation sources scripts outside of Community Scripts repo. Please check the source before installing.",
|
||||||
|
"type": "warning"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
4
frontend/public/json/gitea-mirror.json
generated
4
frontend/public/json/gitea-mirror.json
generated
@ -9,9 +9,9 @@
|
|||||||
"updateable": true,
|
"updateable": true,
|
||||||
"privileged": false,
|
"privileged": false,
|
||||||
"interface_port": 4321,
|
"interface_port": 4321,
|
||||||
"documentation": "https://github.com/arunavo4/gitea-mirror/",
|
"documentation": "https://github.com/RayLabsHQ/gitea-mirror/",
|
||||||
"config_path": "/etc/systemd/system/gitea-mirror.service",
|
"config_path": "/etc/systemd/system/gitea-mirror.service",
|
||||||
"website": "https://github.com/arunavo4/gitea-mirror/",
|
"website": "https://github.com/RayLabsHQ/gitea-mirror/",
|
||||||
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/gitea-mirror.webp",
|
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons/webp/gitea-mirror.webp",
|
||||||
"description": "Gitea Mirror auto-syncs GitHub repos to your self-hosted Gitea, with a sleek Web UI and easy Docker deployment. ",
|
"description": "Gitea Mirror auto-syncs GitHub repos to your self-hosted Gitea, with a sleek Web UI and easy Docker deployment. ",
|
||||||
"install_methods": [
|
"install_methods": [
|
||||||
|
4
frontend/public/json/proxmox-backup-server.json
generated
4
frontend/public/json/proxmox-backup-server.json
generated
@ -35,6 +35,10 @@
|
|||||||
{
|
{
|
||||||
"text": "Set a root password if using autologin. This will be the PBS password. `passwd root`",
|
"text": "Set a root password if using autologin. This will be the PBS password. `passwd root`",
|
||||||
"type": "warning"
|
"type": "warning"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"text": "Advanced Install is only possible without root password and root SSH access, you can configure this after installation.",
|
||||||
|
"type": "warning"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
2
frontend/public/json/threadfin.json
generated
2
frontend/public/json/threadfin.json
generated
@ -6,7 +6,7 @@
|
|||||||
],
|
],
|
||||||
"date_created": "2024-06-12",
|
"date_created": "2024-06-12",
|
||||||
"type": "ct",
|
"type": "ct",
|
||||||
"updateable": false,
|
"updateable": true,
|
||||||
"privileged": false,
|
"privileged": false,
|
||||||
"interface_port": 34400,
|
"interface_port": 34400,
|
||||||
"documentation": null,
|
"documentation": null,
|
||||||
|
frontend/public/json/versions.json (generated, 412 changed lines)
@@ -1,8 +1,213 @@ and following hunks. New entries added at the top of the file (name, version, date):
+ VictoriaMetrics/VictoriaMetrics  v1.121.0  2025-07-07T11:32:39Z
+ meilisearch/meilisearch  prototype-incremental-vector-store-3  2025-07-07T10:27:19Z
+ Paymenter/Paymenter  v1.2.1  2025-07-07T10:11:26Z
+ Checkmk/checkmk  v2.4.0p7-rc1  2025-07-07T09:25:01Z
+ nzbgetcom/nzbget  v25.2  2025-07-04T08:21:42Z
+ zwave-js/zwave-js-ui  v10.8.0  2025-07-07T08:37:45Z
+ morpheus65535/bazarr  v1.5.2  2025-05-11T16:40:55Z
+ Jackett/Jackett  v0.22.2125  2025-07-07T05:56:33Z
+ mattermost/mattermost  preview-v0.1  2025-06-27T14:35:47Z
+ MediaBrowser/Emby.Releases  4.9.1.2  2025-06-26T22:08:00Z
+ firefly-iii/firefly-iii  v6.2.20  2025-07-02T04:03:37Z
+ steveiliop56/tinyauth  v3.4.1  2025-06-11T07:53:44Z
+ slskd/slskd  0.23.1  2025-07-06T23:57:52Z
+ pelican-dev/panel  v1.0.0-beta22  2025-07-06T21:16:00Z
+ pelican-dev/wings  v1.0.0-beta14  2025-07-06T21:07:07Z
+ pocket-id/pocket-id  v1.6.1  2025-07-06T20:59:34Z
+ Luligu/matterbridge  3.1.2  2025-07-06T20:55:23Z
+ bluenviron/mediamtx  v1.13.0  2025-07-06T19:23:55Z
+ syncthing/syncthing  v1.30.0  2025-07-01T11:29:11Z
+ traccar/traccar  v6.8.0  2025-07-06T18:19:05Z
+ msgbyte/tianji  v1.23.0  2025-07-06T16:01:58Z
+ TandoorRecipes/recipes  1.5.35  2025-06-22T08:30:10Z
+ Part-DB/Part-DB-server  v1.17.2  2025-07-06T12:21:52Z
+ redis/redis  8.0.3  2025-07-06T12:19:24Z
+ fallenbagel/jellyseerr  preview-OIDC  2025-07-06T00:51:06Z
+ hyperion-project/hyperion.ng  2.1.1  2025-06-14T17:45:06Z
+ Kareadita/Kavita  v0.8.7  2025-07-05T20:08:58Z
+ cross-seed/cross-seed  v6.12.7  2025-06-18T03:44:24Z
+ nicolargo/glances  v4.3.2  2025-07-05T16:00:15Z
+ runtipi/runtipi  v4.3.0  2025-07-05T12:14:52Z
+ fuma-nama/fumadocs  fumadocs-openapi@9.0.18  2025-07-05T09:36:45Z
+ theonedev/onedev  v11.11.4  2025-07-05T09:23:25Z
+ linkwarden/linkwarden  v2.11.3  2025-07-05T04:34:46Z
+ home-assistant/core  2025.7.1  2025-07-04T20:02:52Z
+ homarr-labs/homarr  v1.27.0  2025-07-04T19:16:16Z
+ zitadel/zitadel  v3.3.0  2025-06-12T06:54:48Z
+ bunkerity/bunkerweb  v1.6.2  2025-07-04T15:21:18Z
+ emqx/emqx  e6.0.0-M1.202507-alpha.1  2025-07-04T14:58:23Z
+ kimai/kimai  2.37.0  2025-07-04T14:49:43Z
+ keycloak/keycloak  26.3.0  2025-07-02T12:26:44Z
+ Graylog2/graylog2-server  6.3.1  2025-07-04T11:20:48Z

Updated entry:
  outline/outline  v0.85.0  date 2025-07-03T23:31:00Z -> 2025-07-04T00:06:47Z

Superseded entries removed further down in the file (name, version, date):
- bunkerity/bunkerweb  v1.6.1  2025-03-15T17:29:17Z
- Checkmk/checkmk  v2.4.0p6  2025-07-03T16:40:42Z
- fuma-nama/fumadocs  fumadocs-openapi@9.0.17  2025-07-03T06:57:48Z
- mattermost/mattermost  preview-v0.1  2025-06-27T14:35:47Z
- Jackett/Jackett  v0.22.2111  2025-07-03T05:50:31Z
- cross-seed/cross-seed  v6.12.7  2025-06-18T03:44:24Z
- emqx/emqx  v5.8.7  2025-07-02T21:54:54Z
- redis/redis  8.2-rc1-int  2025-07-02T19:27:08Z
- keycloak/keycloak  26.3.0  2025-07-02T12:26:44Z
- firefly-iii/firefly-iii  v6.2.20  2025-07-02T04:03:37Z
- home-assistant/core  2025.7.0  2025-07-02T16:23:42Z
- nzbgetcom/nzbget  v25.1  2025-06-27T09:14:14Z
- Graylog2/graylog2-server  6.2.5  2025-07-02T13:06:30Z
- hyperion-project/hyperion.ng  2.1.1  2025-06-14T17:45:06Z
- syncthing/syncthing  v1.30.0  2025-07-01T11:29:11Z
- theonedev/onedev  v11.11.2  2025-06-29T01:40:39Z
- linkwarden/linkwarden  v2.11.2  2025-06-28T17:33:38Z
- msgbyte/tianji  v1.22.5  2025-06-28T16:06:19Z
- Luligu/matterbridge  3.1.0  2025-06-28T09:02:38Z
- pocket-id/pocket-id  v1.5.0  2025-06-27T22:04:32Z
- homarr-labs/homarr  v1.26.0  2025-06-27T19:15:24Z
- fallenbagel/jellyseerr  preview-seerr  2025-06-27T06:10:03Z
- MediaBrowser/Emby.Releases  4.9.1.2  2025-06-26T22:08:00Z
- meilisearch/meilisearch  prototype-no-simd-x86-arroy-0  2025-06-26T14:54:18Z
- runtipi/runtipi  v4.2.1  2025-06-03T20:04:28Z
- VictoriaMetrics/VictoriaMetrics  pmm-6401-v1.120.0  2025-06-23T15:12:12Z
- TandoorRecipes/recipes  1.5.35  2025-06-22T08:30:10Z
- zwave-js/zwave-js-ui  v10.7.0  2025-06-18T11:57:05Z
- morpheus65535/bazarr  v1.5.2  2025-05-11T16:40:55Z
- kimai/kimai  2.36.1  2025-06-16T19:20:54Z
- traccar/traccar  v6.7.3  2025-06-15T05:46:17Z
- zitadel/zitadel  v3.3.0  2025-06-12T06:54:48Z
- steveiliop56/tinyauth  v3.4.1  2025-06-11T07:53:44Z
- bluenviron/mediamtx  v1.12.3  2025-05-27T20:43:10Z
- Part-DB/Part-DB-server  v1.17.1  2025-05-18T21:06:41Z
- Paymenter/Paymenter  v1.1.1  2025-05-17T10:10:36Z
- pelican-dev/wings  v1.0.0-beta13  2025-05-09T23:14:41Z
- pelican-dev/panel  v1.0.0-beta21  2025-05-09T23:14:23Z
- Kareadita/Kavita  v0.8.6.2  2025-04-20T16:55:38Z
- slskd/slskd  0.22.5  2025-04-15T02:52:26Z
- nicolargo/glances  v4.3.1  2025-03-23T09:02:54Z
(AgentDVR install script)
@@ -14,11 +14,12 @@ network_check
 update_os

 msg_info "Installing Dependencies"
-$STD apt-get install -y apt-transport-https
-$STD apt-get install -y alsa-utils
-$STD apt-get install -y libxext-dev
-$STD apt-get install -y fontconfig
-$STD apt-get install -y libva-drm2
+$STD apt-get install -y \
+  apt-transport-https \
+  alsa-utils \
+  libxext-dev \
+  fontconfig \
+  libva-drm2
 msg_ok "Installed Dependencies"

 msg_info "Installing AgentDVR"

@@ -27,7 +28,6 @@ RELEASE=$(curl -fsSL "https://www.ispyconnect.com/api/Agent/DownloadLocation4?pl
 cd /opt/agentdvr/agent
 curl -fsSL "$RELEASE" -o $(basename "$RELEASE")
 $STD unzip Agent_Linux64*.zip
-rm -rf Agent_Linux64*.zip
 chmod +x ./Agent
 msg_ok "Installed AgentDVR"

@@ -54,6 +54,7 @@ motd_ssh
 customize

 msg_info "Cleaning up"
+rm -rf Agent_Linux64*.zip
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
(OpenRC-based Docker install script, after the Docker Compose prompt)
@@ -67,5 +67,14 @@ if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
   msg_ok "Installed Docker Compose $DOCKER_COMPOSE_LATEST_VERSION"
 fi

+read -r -p "${TAB3}Would you like to expose the Docker TCP socket? <y/N> " prompt
+if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
+  msg_info "Exposing Docker TCP socket"
+  $STD mkdir -p /etc/docker
+  $STD echo '{ "hosts": ["unix:///var/run/docker.sock", "tcp://0.0.0.0:2375"] }' > /etc/docker/daemon.json
+  $STD rc-service docker restart
+  msg_ok "Exposed Docker TCP socket at tcp://+:2375"
+fi
+
 motd_ssh
 customize
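The new prompt writes a plain, unauthenticated TCP listener into /etc/docker/daemon.json alongside the Unix socket. Once the daemon has restarted, a remote Docker CLI can target it, for example (the address below is a placeholder, not taken from the diff):

  # query the exposed daemon from another machine
  docker -H tcp://192.168.1.50:2375 info
  # or for a whole shell session
  export DOCKER_HOST=tcp://192.168.1.50:2375
  docker ps

Anything that can reach port 2375 gets full control of the Docker daemon, so this option only makes sense on trusted networks or behind TLS/a firewall.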
(tinyauth install script)
@@ -24,13 +24,13 @@ RELEASE=$(curl -s https://api.github.com/repos/steveiliop56/tinyauth/releases/la
 curl -fsSL "https://github.com/steveiliop56/tinyauth/releases/download/v${RELEASE}/tinyauth-amd64" -o /opt/tinyauth/tinyauth
 chmod +x /opt/tinyauth/tinyauth

-PASSWORD=$(openssl rand -base64 8 | tr -dc 'a-zA-Z0-9' | head -c 8)
-USER=$(htpasswd -Bbn "tinyauth" "${PASSWORD}")
+PASS=$(openssl rand -base64 8 | tr -dc 'a-zA-Z0-9' | head -c 8)
+USER=$(htpasswd -Bbn "tinyauth" "${PASS}")

-cat <<EOF > /opt/tinyauth/credentials.txt
+cat <<EOF >/opt/tinyauth/credentials.txt
 Tinyauth Credentials
 Username: tinyauth
-Password: ${PASSWORD}
+Password: ${PASS}
 EOF

 echo "${RELEASE}" >/opt/tinyauth_version.txt
(Authelia install script)
@@ -13,13 +13,9 @@ setting_up_container
 network_check
 update_os

-msg_info "Installing Authelia"
-RELEASE=$(curl -fsSL https://api.github.com/repos/authelia/authelia/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-curl -fsSL "https://github.com/authelia/authelia/releases/download/${RELEASE}/authelia_${RELEASE}_amd64.deb" -o "authelia_${RELEASE}_amd64.deb"
-$STD dpkg -i "authelia_${RELEASE}_amd64.deb"
-msg_ok "Install Authelia completed"
+fetch_and_deploy_gh_release "authelia" "authelia/authelia" "binary"

-read -p "${TAB3}Enter your domain (ex. example.com): " DOMAIN
+read -rp "${TAB3}Enter your domain (ex. example.com): " DOMAIN

 msg_info "Setting Authelia up"
 touch /etc/authelia/emails.txt

@@ -72,7 +68,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -f "authelia_${RELEASE}_amd64.deb"
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
(Babybuddy install script)
@@ -24,13 +24,10 @@ $STD apt-get install -y \
 msg_ok "Installed Dependencies"

 setup_uv
+fetch_and_deploy_gh_release "babybuddy" "babybuddy/babybuddy"

 msg_info "Installing Babybuddy"
-RELEASE=$(curl -fsSL https://api.github.com/repos/babybuddy/babybuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-temp_file=$(mktemp)
-mkdir -p /opt/{babybuddy,data}
-curl -fsSL "https://github.com/babybuddy/babybuddy/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf "$temp_file" --strip-components=1 -C /opt/babybuddy
+mkdir -p /opt/data
 cd /opt/babybuddy
 $STD uv venv .venv
 $STD source .venv/bin/activate

@@ -102,7 +99,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -f "$temp_file"
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
(Baikal install script)
@@ -14,13 +14,12 @@ network_check
 update_os

 msg_info "Installing Dependencies"
-$STD apt-get install -y \
-  apache2 \
-  libapache2-mod-php \
-  php-{pgsql,dom}
+$STD apt-get install -y apache2
 msg_ok "Installed Dependencies"

 PG_VERSION="16" setup_postgresql
+PHP_APACHE="YES" PHP_MODULE="pgsql, dom" PHP_VERSION="8.2" setup_php
+fetch_and_deploy_gh_release "baikal" "sabre-io/Baikal"

 msg_info "Setting up PostgreSQL Database"
 DB_NAME=baikal

@@ -36,11 +35,7 @@ $STD sudo -u postgres psql -c "CREATE DATABASE $DB_NAME WITH OWNER $DB_USER TEMP
 } >>~/baikal.creds
 msg_ok "Set up PostgreSQL Database"

-msg_info "Installing Baikal"
-RELEASE=$(curl -fsSL https://api.github.com/repos/sabre-io/Baikal/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-cd /opt
-curl -fsSL "https://github.com/sabre-io/baikal/releases/download/${RELEASE}/baikal-${RELEASE}.zip" -o "baikal-${RELEASE}.zip"
-$STD unzip "baikal-${RELEASE}.zip"
+msg_info "Configuring Baikal"
 cat <<EOF >/opt/baikal/config/baikal.yaml
 database:
   backend: pgsql

@@ -51,7 +46,6 @@ database:
 EOF
 chown -R www-data:www-data /opt/baikal/
 chmod -R 755 /opt/baikal/
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
 msg_ok "Installed Baikal"

 msg_info "Creating Service"

@@ -90,7 +84,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -rf "/opt/baikal-${RELEASE}.zip"
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
(barcodebuddy install script)
@@ -16,20 +16,15 @@ update_os
 msg_info "Installing Dependencies"
 $STD apt-get install -y \
   apache2 \
-  redis \
-  php-{curl,date,json,mbstring,redis,sqlite3,sockets} \
-  libapache2-mod-php
+  redis
 msg_ok "Installed Dependencies"

-msg_info "Installing barcodebuddy"
-RELEASE=$(curl -fsSL https://api.github.com/repos/Forceu/barcodebuddy/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-cd /opt
-curl -fsSL "https://github.com/Forceu/barcodebuddy/archive/refs/tags/v${RELEASE}.zip" -o "v${RELEASE}.zip"
-$STD unzip "v${RELEASE}.zip"
-mv "/opt/barcodebuddy-${RELEASE}" /opt/barcodebuddy
+PHP_VERSION="8.2" PHP_APACHE="YES" PHP_MODULE="date, json, redis, sqlite3, sockets" setup_php
+fetch_and_deploy_gh_release "barcodebuddy" "Forceu/barcodebuddy"
+
+msg_info "Configuring barcodebuddy"
 chown -R www-data:www-data /opt/barcodebuddy/data
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
-msg_ok "Installed barcodebuddy"
+msg_ok "Configured barcodebuddy"

 msg_info "Creating Services"
 cat <<EOF >/etc/systemd/system/barcodebuddy.service

@@ -73,7 +68,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -rf "/opt/v${RELEASE}.zip"
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
(bitmagnet install script)
@@ -22,17 +22,10 @@ msg_ok "Installed Dependencies"

 PG_VERSION="16" setup_postgresql
 setup_go
+fetch_and_deploy_gh_release "bitmagnet" "bitmagnet-io/bitmagnet"
 RELEASE=$(curl -fsSL https://api.github.com/repos/bitmagnet-io/bitmagnet/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')

-msg_info "Installing bitmagnet v${RELEASE}"
-mkdir -p /opt/bitmagnet
-temp_file=$(mktemp)
-curl -fsSL "https://github.com/bitmagnet-io/bitmagnet/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf "$temp_file" --strip-components=1 -C /opt/bitmagnet
-cd /opt/bitmagnet
-VREL=v$RELEASE
-$STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
-chmod +x bitmagnet
+msg_info "Setting up database"
 POSTGRES_PASSWORD=$(openssl rand -base64 18 | tr -dc 'a-zA-Z0-9' | head -c13)
 $STD sudo -u postgres psql -c "ALTER USER postgres WITH PASSWORD '$POSTGRES_PASSWORD';"
 $STD sudo -u postgres psql -c "CREATE DATABASE bitmagnet;"

@@ -41,8 +34,14 @@ $STD sudo -u postgres psql -c "CREATE DATABASE bitmagnet;"
 echo ""
 echo "postgres user password: $POSTGRES_PASSWORD"
 } >>~/postgres.creds
-echo "${RELEASE}" >/opt/bitmagnet_version.txt
-msg_ok "Installed bitmagnet v${RELEASE}"
+msg_ok "Database set up"
+
+msg_info "Configuring bitmagnet v${RELEASE}"
+cd /opt/bitmagnet
+VREL=v$RELEASE
+$STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"
+chmod +x bitmagnet
+msg_ok "Configured bitmagnet v${RELEASE}"

 read -r -p "${TAB3}Enter your TMDB API key if you have one: " tmdbapikey

@@ -72,7 +71,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -f "$temp_file"
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
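The build step kept above stamps the release tag into the binary at link time instead of patching source files. Annotated with the same variables the script uses (the concrete tag value is only an example):

  VREL=v$RELEASE   # e.g. v1.2.3, the GitHub release tag fetched earlier
  # -s -w             strip the symbol and DWARF debug tables to shrink the binary
  # -X pkg.Var=value   set the string variable pkg.Var in the named package at link time
  $STD go build -ldflags "-s -w -X github.com/bitmagnet-io/bitmagnet/internal/version.GitTag=$VREL"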
(BookStack install script)
@@ -16,12 +16,12 @@ update_os
 msg_info "Installing Dependencies (Patience)"
 $STD apt-get install -y \
   apache2 \
-  php8.2-{mbstring,gd,fpm,curl,intl,ldap,tidy,bz2,mysql,zip,xml} \
-  composer \
-  libapache2-mod-php \
   make
 msg_ok "Installed Dependencies"

+PHP_MODULE="ldap,tidy,bz2,mysqli" PHP_FPM="YES" PHP_APACHE="YES" PHP_VERSION="8.3" setup_php
+
+setup_composer
 setup_mariadb

 msg_info "Setting up Database"

@@ -39,13 +39,10 @@ $STD mariadb -u root -e "GRANT ALL ON $DB_NAME.* TO '$DB_USER'@'localhost'; FLUS
 } >>~/bookstack.creds
 msg_ok "Set up database"

-msg_info "Setup Bookstack (Patience)"
+fetch_and_deploy_gh_release "bookstack" "BookStackApp/BookStack"
 LOCAL_IP="$(hostname -I | awk '{print $1}')"
-cd /opt
-RELEASE=$(curl -fsSL https://api.github.com/repos/BookStackApp/BookStack/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/BookStackApp/BookStack/archive/refs/tags/v${RELEASE}.zip" -o "v${RELEASE}.zip"
-$STD unzip v${RELEASE}.zip
-mv BookStack-${RELEASE} /opt/bookstack
+msg_info "Configuring Bookstack (Patience)"
 cd /opt/bookstack
 cp .env.example .env
 sudo sed -i "s|APP_URL=.*|APP_URL=http://$LOCAL_IP|g" /opt/bookstack/.env

@@ -61,8 +58,7 @@ chmod -R 775 /opt/bookstack/storage /opt/bookstack/bootstrap/cache /opt/bookstac
 chmod -R 640 /opt/bookstack/.env
 $STD a2enmod rewrite
 $STD a2enmod php8.2
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
-msg_ok "Installed Bookstack"
+msg_ok "Configured Bookstack"

 msg_info "Creating Service"
 cat <<EOF >/etc/apache2/sites-available/bookstack.conf

@@ -111,7 +107,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -rf /opt/v${RELEASE}.zip
 $STD apt-get autoremove
 $STD apt-get autoclean
 msg_ok "Cleaned"
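Several of the install scripts in this set now delegate PHP to the repo's setup_php helper, driven by environment variables, instead of installing php8.x-* packages directly. The calls visible in these diffs follow one shape (exact semantics of the variables are defined by the helper in the repo's shared install functions, not shown here):

  # examples copied from the diffs above
  PHP_APACHE="YES" PHP_MODULE="pgsql, dom" PHP_VERSION="8.2" setup_php                                  # Baikal
  PHP_VERSION="8.2" PHP_APACHE="YES" PHP_MODULE="date, json, redis, sqlite3, sockets" setup_php         # barcodebuddy
  PHP_MODULE="ldap,tidy,bz2,mysqli" PHP_FPM="YES" PHP_APACHE="YES" PHP_VERSION="8.3" setup_php          # BookStack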
(BunkerWeb install script)
@@ -18,19 +18,12 @@ $STD apt-get install -y apt-transport-https
 $STD apt-get install -y lsb-release
 msg_ok "Installed Dependencies"

-msg_info "Installing Nginx"
-curl -fsSL "https://nginx.org/keys/nginx_signing.key" | gpg --dearmor >/usr/share/keyrings/nginx-archive-keyring.gpg
-echo "deb [signed-by=/usr/share/keyrings/nginx-archive-keyring.gpg] http://nginx.org/packages/debian $(lsb_release -cs) nginx" >/etc/apt/sources.list.d/nginx.list
-$STD apt-get update
-$STD apt-get install -y nginx=1.26.3*
-msg_ok "Installed Nginx"
-
 RELEASE=$(curl -fsSL https://api.github.com/repos/bunkerity/bunkerweb/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
 msg_info "Installing BunkerWeb v${RELEASE} (Patience)"
-curl -fsSL "https://repo.bunkerweb.io/bunkerity/bunkerweb/gpgkey" | gpg --dearmor >/etc/apt/keyrings/bunkerity_bunkerweb-archive-keyring.gpg
-echo "deb [signed-by=/etc/apt/keyrings/bunkerity_bunkerweb-archive-keyring.gpg] https://repo.bunkerweb.io/bunkerity/bunkerweb/debian/ bookworm main" >/etc/apt/sources.list.d/bunkerity_bunkerweb.list
-$STD apt-get update
-$STD apt-get install -y bunkerweb=${RELEASE}
+curl -fsSL -o install-bunkerweb.sh https://github.com/bunkerity/bunkerweb/raw/v${RELEASE}/misc/install-bunkerweb.sh
+chmod +x install-bunkerweb.sh
+$STD ./install-bunkerweb.sh --yes
+$STD apt-mark unhold bunkerweb nginx
 cat <<EOF >/etc/apt/preferences.d/bunkerweb
 Package: bunkerweb
 Pin: version ${RELEASE}
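The script keeps BunkerWeb from being moved by routine upgrades: it removes any package hold (apt-mark unhold bunkerweb nginx) and then pins the installed version through an apt preferences file. The excerpt above only shows the first two fields of that file; a complete pin stanza normally ends with a priority line, roughly like this (the priority value is not visible in the excerpt and is only an assumption here):

  # /etc/apt/preferences.d/bunkerweb
  Package: bunkerweb
  Pin: version 1.6.2        # ${RELEASE} at install time
  Pin-Priority: 1001        # assumed value; a priority above 1000 keeps apt on this exact version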
(ByteStash install script)
@@ -14,22 +14,17 @@ network_check
 update_os

 NODE_VERSION="22" setup_nodejs
+fetch_and_deploy_gh_release "bytestash" "jordan-dalby/ByteStash"

 msg_info "Installing ByteStash"
 JWT_SECRET=$(openssl rand -base64 32 | tr -d '/+=')
-temp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/jordan-dalby/ByteStash/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/jordan-dalby/ByteStash/archive/refs/tags/v${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf $temp_file
-mv ByteStash-${RELEASE} /opt/bytestash
 cd /opt/bytestash/server
 $STD npm install
 cd /opt/bytestash/client
 $STD npm install
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
 msg_ok "Installed ByteStash"

-read -p "${TAB3}Do you want to allow registration of multiple accounts? [y/n]: " allowreg
+read -rp "${TAB3}Do you want to allow registration of multiple accounts? [y/n]: " allowreg

 msg_info "Creating Service"
 cat <<EOF >/etc/systemd/system/bytestash-backend.service

@@ -73,7 +68,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -f $temp_file
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
(CryptPad install script)
@@ -14,20 +14,15 @@ network_check
 update_os

 msg_info "Installing Dependencies"
-$STD apt-get install -y \
-  git
+$STD apt-get install -y git
 msg_ok "Installed Dependencies"

 NODE_VERSION="22" setup_nodejs

-read -p "${TAB3}Install OnlyOffice components instead of CKEditor? (Y/N): " onlyoffice
+read -rp "${TAB3}Install OnlyOffice components instead of CKEditor? (Y/N): " onlyoffice
+fetch_and_deploy_gh_release "cryptpad" "cryptpad/cryptpad"

 msg_info "Setup ${APPLICATION}"
-temp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/cryptpad/cryptpad/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-curl -fsSL "https://github.com/cryptpad/cryptpad/archive/refs/tags/${RELEASE}.tar.gz" -o "$temp_file"
-tar zxf $temp_file
-mv cryptpad-$RELEASE /opt/cryptpad
 cd /opt/cryptpad
 $STD npm ci
 $STD npm run install:components

@@ -39,7 +34,6 @@ sed -i "80s#//httpAddress: 'localhost'#httpAddress: '0.0.0.0'#g" /opt/cryptpad/c
 if [[ "$onlyoffice" =~ ^[Yy]$ ]]; then
   $STD bash -c "./install-onlyoffice.sh --accept-license"
 fi
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
 msg_ok "Setup ${APPLICATION}"

 msg_info "Creating Service"

@@ -69,7 +63,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -f $temp_file
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
(Dashy install script)
@@ -14,15 +14,12 @@ network_check
 update_os

 NODE_VERSION="22" setup_nodejs
+fetch_and_deploy_gh_release "dashy" "Lissy93/dashy"

-RELEASE=$(curl -fsSL https://api.github.com/repos/Lissy93/dashy/releases/latest | grep "tag_name" | awk '{print substr($2, 2, length($2)-3) }')
-
 msg_info "Installing Dashy ${RELEASE} (Patience)"
-mkdir -p /opt/dashy
-curl -fsSL "https://github.com/Lissy93/dashy/archive/refs/tags/${RELEASE}.tar.gz" | tar -xz -C /opt/dashy --strip-components=1
 cd /opt/dashy
 $STD npm install
 $STD npm run build
-echo "${RELEASE}" >/opt/${APPLICATION}_version.txt
 msg_ok "Installed Dashy ${RELEASE}"

 msg_info "Creating Service"
(A second OpenRC-based install script receives the same Docker TCP socket prompt)
@@ -57,6 +57,15 @@ else
   fi
 fi

+read -r -p "${TAB3}Would you like to expose the Docker TCP socket? <y/N> " prompt
+if [[ "${prompt,,}" =~ ^(y|yes)$ ]]; then
+  msg_info "Exposing Docker TCP socket"
+  $STD mkdir -p /etc/docker
+  $STD echo '{ "hosts": ["unix:///var/run/docker.sock", "tcp://0.0.0.0:2375"] }' > /etc/docker/daemon.json
+  $STD rc-service docker restart
+  msg_ok "Exposed Docker TCP socket at tcp://+:2375"
+fi
+
 motd_ssh
 customize
(Docmost install script)
@@ -22,6 +22,7 @@ msg_ok "Installed Dependencies"

 NODE_VERSION="22" NODE_MODULE="pnpm@$(curl -s https://raw.githubusercontent.com/docmost/docmost/main/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
 PG_VERSION="16" setup_postgresql
+fetch_and_deploy_gh_release "docmost" "docmost/docmost"

 msg_info "Setting up PostgreSQL"
 DB_NAME="docmost_db"

@@ -40,12 +41,7 @@ $STD sudo -u postgres psql -c "ALTER ROLE $DB_USER SET timezone TO 'UTC'"
 } >>~/docmost.creds
 msg_ok "Set up PostgreSQL"

-msg_info "Installing Docmost (Patience)"
-temp_file=$(mktemp)
-RELEASE=$(curl -fsSL https://api.github.com/repos/docmost/docmost/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-curl -fsSL "https://github.com/docmost/docmost/archive/refs/tags/v${RELEASE}.tar.gz" -o ""$temp_file""
-tar -xzf "$temp_file"
-mv docmost-${RELEASE} /opt/docmost
+msg_info "Configuring Docmost (Patience)"
 cd /opt/docmost
 mv .env.example .env
 mkdir data

@@ -56,8 +52,7 @@ sed -i -e "s|APP_SECRET=.*|APP_SECRET=$(openssl rand -base64 32 | tr -dc 'a-zA-Z
 export NODE_OPTIONS="--max-old-space-size=2048"
 $STD pnpm install
 $STD pnpm build
-echo "${RELEASE}" >"/opt/${APPLICATION}_version.txt"
-msg_ok "Installed Docmost"
+msg_ok "Configured Docmost"

 msg_info "Creating Service"
 cat <<EOF >/etc/systemd/system/docmost.service

@@ -81,7 +76,6 @@ motd_ssh
 customize

 msg_info "Cleaning up"
-rm -f "$temp_file"
 $STD apt-get -y autoremove
 $STD apt-get -y autoclean
 msg_ok "Cleaned"
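The PostgreSQL setup above records the generated credentials in ~/docmost.creds (the docmost.json note earlier in this compare points users at the same file). A quick way to confirm the application database is reachable with those credentials; the user and password placeholders below come from that file and are not visible in the diff:

  # DB_NAME is "docmost_db" as set above; <db_user>/<db_password> are taken from ~/docmost.creds
  psql "postgresql://<db_user>:<db_password>@localhost:5432/docmost_db" -c '\conninfo'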
(gitea-mirror install script)
@@ -3,7 +3,7 @@
 # Copyright (c) 2021-2025 community-scripts ORG
 # Author: CrazyWolf13
 # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
-# Source: https://github.com/arunavo4/gitea-mirror
+# Source: https://github.com/RayLabsHQ/gitea-mirror

 source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
 color

@@ -28,7 +28,7 @@ ln -sf /opt/bun/bin/bun /usr/local/bin/bun
 ln -sf /opt/bun/bin/bun /usr/local/bin/bunx
 msg_ok "Installed Bun"

-fetch_and_deploy_gh_release "gitea-mirror" "arunavo4/gitea-mirror"
+fetch_and_deploy_gh_release "gitea-mirror" "RayLabsHQ/gitea-mirror"

 msg_info "Installing gitea-mirror"
 cd /opt/gitea-mirror
|
|||||||
fi
|
fi
|
||||||
msg_ok "Dependencies Installed"
|
msg_ok "Dependencies Installed"
|
||||||
|
|
||||||
read -r -p "Install OpenVINO dependencies for Intel HW-accelerated machine-learning? y/N " prompt
|
read -r -p "${TAB3}Install OpenVINO dependencies for Intel HW-accelerated machine-learning? y/N " prompt
|
||||||
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
|
if [[ ${prompt,,} =~ ^(y|yes)$ ]]; then
|
||||||
msg_info "Installing OpenVINO dependencies"
|
msg_info "Installing OpenVINO dependencies"
|
||||||
touch ~/.openvino
|
touch ~/.openvino
|
||||||
|
(Mafl install script)
@@ -14,22 +14,17 @@ network_check
 update_os

 msg_info "Installing Dependencies"
-$STD apt-get install -y make
-$STD apt-get install -y g++
-$STD apt-get install -y gcc
-$STD apt-get install -y ca-certificates
+$STD apt-get install -y \
+  ca-certificates \
+  build-essential
 msg_ok "Installed Dependencies"

 NODE_VERSION="22" NODE_MODULE="yarn@latest" setup_nodejs
+fetch_and_deploy_gh_release "mafl" "hywax/mafl"

-RELEASE=$(curl -fsSL https://api.github.com/repos/hywax/mafl/releases/latest | grep "tag_name" | awk '{print substr($2, 3, length($2)-4) }')
-
 msg_info "Installing Mafl v${RELEASE}"
-curl -fsSL "https://github.com/hywax/mafl/archive/refs/tags/v${RELEASE}.tar.gz" -o "v${RELEASE}.tar.gz"
-tar -xzf v${RELEASE}.tar.gz
 mkdir -p /opt/mafl/data
 curl -fsSL "https://raw.githubusercontent.com/hywax/mafl/main/.example/config.yml" -o "/opt/mafl/data/config.yml"
-mv mafl-${RELEASE}/* /opt/mafl
-rm -rf mafl-${RELEASE}
 cd /opt/mafl
 export NUXT_TELEMETRY_DISABLED=true
 $STD yarn install
(Stirling-PDF install script)
@@ -80,11 +80,13 @@ curl -fsSL "https://github.com/Stirling-Tools/Stirling-PDF/archive/refs/tags/v${
 tar -xzf v${RELEASE}.tar.gz
 cd Stirling-PDF-$RELEASE
 chmod +x ./gradlew
-$STD ./gradlew build
+$STD ./gradlew build -x spotlessApply -x spotlessCheck -x test -x sonarqube
 mkdir -p /opt/Stirling-PDF
 touch /opt/Stirling-PDF/.env
-mv ./build/libs/Stirling-PDF-*.jar /opt/Stirling-PDF/
+mv ./stirling-pdf/build/libs/*.jar /opt/Stirling-PDF/Stirling-PDF-$RELEASE.jar
 mv scripts /opt/Stirling-PDF/
+mv pipeline /opt/Stirling-PDF/
+mv stirling-pdf/src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto/
 ln -s /opt/Stirling-PDF/Stirling-PDF-$RELEASE.jar /opt/Stirling-PDF/Stirling-PDF.jar
 ln -s /usr/share/tesseract-ocr/5/tessdata/ /usr/share/tessdata
 msg_ok "Installed Stirling-PDF"
(Threadfin install script)
@@ -14,16 +14,12 @@ network_check
 update_os

 msg_info "Installing Dependencies"
-$STD apt-get install -y ffmpeg
-$STD apt-get install -y vlc
+$STD apt-get install -y \
+  ffmpeg \
+  vlc
 msg_ok "Installed Dependencies"

-msg_info "Installing Threadfin"
-mkdir -p /opt/threadfin
-curl -fsSL "https://github.com/Threadfin/Threadfin/releases/latest/download/Threadfin_linux_amd64" -o "/opt/threadfin/threadfin"
-chmod +x /opt/threadfin/threadfin
-
-msg_ok "Installed Threadfin"
+fetch_and_deploy_gh_release "threadfin" "threadfin/threadfin" "singlefile" "latest" "/opt/threadfin" "Threadfin_linux_amd64"

 msg_info "Creating Service"
 cat <<EOF >/etc/systemd/system/threadfin.service
(Alpine install functions, update_os)
@@ -83,11 +83,6 @@ update_os() {
   msg_info "Updating Container OS"
   $STD apk -U upgrade
   msg_ok "Updated Container OS"
-
-  msg_info "Installing core dependencies"
-  $STD apk update
-  $STD apk add newt curl openssh nano mc ncurses gpg
-  msg_ok "Core dependencies installed"
 }

 # This function modifies the message of the day (motd) and SSH settings
132 misc/build.func
@@ -304,13 +304,12 @@ echo_default() {
fi

# Output the selected values with icons
-echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os${CL}"
-echo -e "${OSVERSION}${BOLD}${DGN}Version: ${BGN}$var_version${CL}"
+echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}${CT_ID}${CL}"
+echo -e "${OS}${BOLD}${DGN}Operating System: ${BGN}$var_os ($var_version)${CL}"
echo -e "${CONTAINERTYPE}${BOLD}${DGN}Container Type: ${BGN}$CT_TYPE_DESC${CL}"
echo -e "${DISKSIZE}${BOLD}${DGN}Disk Size: ${BGN}${DISK_SIZE} GB${CL}"
echo -e "${CPUCORE}${BOLD}${DGN}CPU Cores: ${BGN}${CORE_COUNT}${CL}"
echo -e "${RAMSIZE}${BOLD}${DGN}RAM Size: ${BGN}${RAM_SIZE} MiB${CL}"
-echo -e "${CONTAINERID}${BOLD}${DGN}Container ID: ${BGN}${CT_ID}${CL}"
if [ "$VERB" == "yes" ]; then
echo -e "${SEARCH}${BOLD}${DGN}Verbose Mode: ${BGN}Enabled${CL}"
fi
|
|||||||
# This executes create_lxc.sh and creates the container and .conf file
|
# This executes create_lxc.sh and creates the container and .conf file
|
||||||
bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/create_lxc.sh)" $?
|
bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/create_lxc.sh)" $?
|
||||||
|
|
||||||
LXC_CONFIG=/etc/pve/lxc/${CTID}.conf
|
LXC_CONFIG="/etc/pve/lxc/${CTID}.conf"
|
||||||
|
|
||||||
|
# USB passthrough for privileged LXC (CT_TYPE=0)
|
||||||
if [ "$CT_TYPE" == "0" ]; then
|
if [ "$CT_TYPE" == "0" ]; then
|
||||||
cat <<EOF >>"$LXC_CONFIG"
|
cat <<EOF >>"$LXC_CONFIG"
|
||||||
# USB passthrough
|
# USB passthrough
|
||||||
@ -1111,38 +1112,98 @@ lxc.mount.entry: /dev/ttyACM1 dev/ttyACM1 none bind,optional,create=
|
|||||||
EOF
|
EOF
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$CT_TYPE" == "0" ]; then
|
# VAAPI passthrough for privileged containers or known apps
|
||||||
if [[ "$APP" == "Channels" || "$APP" == "Emby" || "$APP" == "ErsatzTV" || "$APP" == "Frigate" || "$APP" == "Jellyfin" || "$APP" == "Plex" || "$APP" == "immich" || "$APP" == "Tdarr" || "$APP" == "Open WebUI" || "$APP" == "Unmanic" || "$APP" == "Ollama" || "$APP" == "FileFlows" ]]; then
|
VAAPI_APPS=(
|
||||||
cat <<EOF >>"$LXC_CONFIG"
|
"immich"
|
||||||
# VAAPI hardware transcoding
|
"Channels"
|
||||||
lxc.cgroup2.devices.allow: c 226:0 rwm
|
"Emby"
|
||||||
lxc.cgroup2.devices.allow: c 226:128 rwm
|
"ErsatzTV"
|
||||||
lxc.cgroup2.devices.allow: c 29:0 rwm
|
"Frigate"
|
||||||
lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file
|
"Jellyfin"
|
||||||
lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir
|
"Plex"
|
||||||
lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file
|
"Scrypted"
|
||||||
EOF
|
"Tdarr"
|
||||||
|
"Unmanic"
|
||||||
|
"Ollama"
|
||||||
|
"FileFlows"
|
||||||
|
"Open WebUI"
|
||||||
|
)
|
||||||
|
|
||||||
|
is_vaapi_app=false
|
||||||
|
for vaapi_app in "${VAAPI_APPS[@]}"; do
|
||||||
|
if [[ "$APP" == "$vaapi_app" ]]; then
|
||||||
|
is_vaapi_app=true
|
||||||
|
break
|
||||||
fi
|
fi
|
||||||
else
|
done
|
||||||
if [[ "$APP" == "Channels" || "$APP" == "Emby" || "$APP" == "ErsatzTV" || "$APP" == "Frigate" || "$APP" == "Jellyfin" || "$APP" == "Plex" || "$APP" == "immich" || "$APP" == "Tdarr" || "$APP" == "Open WebUI" || "$APP" == "Unmanic" || "$APP" == "Ollama" || "$APP" == "FileFlows" ]]; then
|
|
||||||
if [[ -e "/dev/dri/renderD128" ]]; then
|
if ([ "$CT_TYPE" == "0" ] || [ "$is_vaapi_app" == "true" ]) &&
|
||||||
if [[ -e "/dev/dri/card0" ]]; then
|
([[ -e /dev/dri/renderD128 ]] || [[ -e /dev/dri/card0 ]] || [[ -e /dev/fb0 ]]); then
|
||||||
cat <<EOF >>"$LXC_CONFIG"
|
|
||||||
# VAAPI hardware transcoding
|
echo ""
|
||||||
dev0: /dev/dri/card0,gid=44
|
msg_custom "⚙️ " "\e[96m" "Configuring VAAPI passthrough for LXC container"
|
||||||
dev1: /dev/dri/renderD128,gid=104
|
|
||||||
EOF
|
if [ "$CT_TYPE" != "0" ]; then
|
||||||
else
|
msg_custom "⚠️ " "\e[33m" "Container is unprivileged – VAAPI passthrough may not work without additional host configuration (e.g., idmap)."
|
||||||
cat <<EOF >>"$LXC_CONFIG"
|
fi
|
||||||
# VAAPI hardware transcoding
|
|
||||||
dev0: /dev/dri/card1,gid=44
|
msg_custom "ℹ️ " "\e[96m" "VAAPI enables GPU hardware acceleration (e.g., for video transcoding in Jellyfin or Plex)."
|
||||||
dev1: /dev/dri/renderD128,gid=104
|
|
||||||
EOF
|
echo ""
|
||||||
|
read -rp "➤ Automatically mount all available VAAPI devices? [Y/n]: " VAAPI_ALL
|
||||||
|
|
||||||
|
if [[ "$VAAPI_ALL" =~ ^[Yy]$|^$ ]]; then
|
||||||
|
# Mount all devices automatically
|
||||||
|
if [[ -e /dev/dri/renderD128 ]]; then
|
||||||
|
echo "lxc.cgroup2.devices.allow: c 226:128 rwm" >>"$LXC_CONFIG"
|
||||||
|
echo "lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file" >>"$LXC_CONFIG"
|
||||||
|
fi
|
||||||
|
if [[ -e /dev/dri/card0 ]]; then
|
||||||
|
echo "lxc.cgroup2.devices.allow: c 226:0 rwm" >>"$LXC_CONFIG"
|
||||||
|
|
||||||
|
echo "lxc.mount.entry: /dev/dri/card0 dev/dri/card0 none bind,optional,create=file" >>"$LXC_CONFIG"
|
||||||
|
fi
|
||||||
|
if [[ -e /dev/fb0 ]]; then
|
||||||
|
echo "lxc.cgroup2.devices.allow: c 29:0 rwm" >>"$LXC_CONFIG"
|
||||||
|
echo "lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file" >>"$LXC_CONFIG"
|
||||||
|
fi
|
||||||
|
if [[ -d /dev/dri ]]; then
|
||||||
|
echo "lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir" >>"$LXC_CONFIG"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# Manual selection per device
|
||||||
|
if [[ -e /dev/dri/renderD128 ]]; then
|
||||||
|
read -rp "➤ Mount /dev/dri/renderD128 (GPU rendering)? [y/N]: " MOUNT_D128
|
||||||
|
if [[ "$MOUNT_D128" =~ ^[Yy]$ ]]; then
|
||||||
|
echo "lxc.cgroup2.devices.allow: c 226:128 rwm" >>"$LXC_CONFIG"
|
||||||
|
echo "lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file" >>"$LXC_CONFIG"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [[ -e /dev/dri/card0 ]]; then
|
||||||
|
read -rp "➤ Mount /dev/dri/card0 (GPU hardware interface)? [y/N]: " MOUNT_CARD0
|
||||||
|
if [[ "$MOUNT_CARD0" =~ ^[Yy]$ ]]; then
|
||||||
|
echo "lxc.cgroup2.devices.allow: c 226:0 rwm" >>"$LXC_CONFIG"
|
||||||
|
echo "lxc.mount.entry: /dev/dri/card0 dev/dri/card0 none bind,optional,create=file" >>"$LXC_CONFIG"
|
||||||
|
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -e /dev/fb0 ]]; then
|
||||||
|
read -rp "➤ Mount /dev/fb0 (Framebuffer, GUI)? [y/N]: " MOUNT_FB0
|
||||||
|
if [[ "$MOUNT_FB0" =~ ^[Yy]$ ]]; then
|
||||||
|
echo "lxc.cgroup2.devices.allow: c 29:0 rwm" >>"$LXC_CONFIG"
|
||||||
|
echo "lxc.mount.entry: /dev/fb0 dev/fb0 none bind,optional,create=file" >>"$LXC_CONFIG"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -d /dev/dri ]]; then
|
||||||
|
echo "lxc.mount.entry: /dev/dri dev/dri none bind,optional,create=dir" >>"$LXC_CONFIG"
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
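Accepting the render-node prompt above ends up appending two plain lines to /etc/pve/lxc/<CTID>.conf — the same strings the echo statements write:

# Resulting container config entries after accepting /dev/dri/renderD128:
lxc.cgroup2.devices.allow: c 226:128 rwm
lxc.mount.entry: /dev/dri/renderD128 dev/dri/renderD128 none bind,optional,create=file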
+# TUN device passthrough
if [ "$ENABLE_TUN" == "yes" ]; then
cat <<EOF >>"$LXC_CONFIG"
lxc.cgroup2.devices.allow: c 10:200 rwm
@@ -1172,10 +1233,13 @@ EOF'
locale-gen >/dev/null && \
export LANG=\$locale_line"

+if [[ -z "${tz:-}" ]]; then
+  tz=$(timedatectl show --property=Timezone --value 2>/dev/null || echo "Etc/UTC")
+fi
if pct exec "$CTID" -- test -e "/usr/share/zoneinfo/$tz"; then
-pct exec "$CTID" -- bash -c "echo $tz >/etc/timezone && ln -sf /usr/share/zoneinfo/$tz /etc/localtime"
+pct exec "$CTID" -- bash -c "tz='$tz'; echo \"\$tz\" >/etc/timezone && ln -sf \"/usr/share/zoneinfo/\$tz\" /etc/localtime"
else
-msg_info "Skipping timezone setup – zone '$tz' not found in container"
+msg_warn "Skipping timezone setup – zone '$tz' not found in container"
fi

pct exec "$CTID" -- bash -c "apt-get update >/dev/null && apt-get install -y sudo curl mc gnupg2 >/dev/null"
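The rewritten pct exec line now passes the zone into the container-side shell as a quoted variable instead of splicing it straight into the command string. A small illustration of the difference (not taken from the diff itself):

# Illustration only: both forms run a command in a child bash, but the second keeps
# $tz as a single quoted value inside that shell instead of pre-expanding it.
tz="Etc/UTC"
bash -c "echo $tz >/etc/timezone"                    # old style: expanded before the inner shell starts
bash -c "tz='$tz'; echo \"\$tz\" >/etc/timezone"     # new style: assigned and expanded inside the inner shell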
@ -1255,7 +1319,9 @@ api_exit_script() {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
trap 'api_exit_script' EXIT
|
if command -v pveversion >/dev/null 2>&1; then
|
||||||
|
trap 'api_exit_script' EXIT
|
||||||
|
fi
|
||||||
trap 'post_update_to_api "failed" "$BASH_COMMAND"' ERR
|
trap 'post_update_to_api "failed" "$BASH_COMMAND"' ERR
|
||||||
trap 'post_update_to_api "failed" "INTERRUPTED"' SIGINT
|
trap 'post_update_to_api "failed" "INTERRUPTED"' SIGINT
|
||||||
trap 'post_update_to_api "failed" "TERMINATED"' SIGTERM
|
trap 'post_update_to_api "failed" "TERMINATED"' SIGTERM
|
||||||
|
400
misc/core.func
400
misc/core.func
@@ -1,30 +1,6 @@
# Copyright (c) 2021-2025 community-scripts ORG
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/LICENSE

-# if ! declare -f wait_for >/dev/null; then
-#   echo "[DEBUG] Undefined function 'wait_for' used from: ${BASH_SOURCE[*]}" >&2
-#   wait_for() {
-#     echo "[DEBUG] Fallback: wait_for called with: $*" >&2
-#     true
-#   }
-# fi
-
-trap 'on_error $? $LINENO' ERR
-trap 'on_exit' EXIT
-trap 'on_interrupt' INT
-trap 'on_terminate' TERM
-
-if ! declare -f wait_for >/dev/null; then
-  wait_for() {
-    true
-  }
-fi
-
-declare -A MSG_INFO_SHOWN=()
-SPINNER_PID=""
-SPINNER_ACTIVE=0
-SPINNER_MSG=""
-
# ------------------------------------------------------------------------------
# Loads core utility groups once (colors, formatting, icons, defaults).
# ------------------------------------------------------------------------------
@ -43,100 +19,51 @@ load_functions() {
|
|||||||
# add more
|
# add more
|
||||||
}
|
}
|
||||||
|
|
||||||
on_error() {
|
# ============================================================================
|
||||||
local exit_code="$1"
|
# Error & Signal Handling – robust, universal, subshell-safe
|
||||||
local lineno="$2"
|
# ============================================================================
|
||||||
|
|
||||||
stop_spinner
|
_tool_error_hint() {
|
||||||
|
local cmd="$1"
|
||||||
case "$exit_code" in
|
local code="$2"
|
||||||
1) msg_error "Generic error occurred (line $lineno)" ;;
|
case "$cmd" in
|
||||||
2) msg_error "Shell misuse (line $lineno)" ;;
|
curl)
|
||||||
126) msg_error "Command cannot execute (line $lineno)" ;;
|
case "$code" in
|
||||||
127) msg_error "Command not found (line $lineno)" ;;
|
6) echo "Curl: Could not resolve host (DNS problem)" ;;
|
||||||
128) msg_error "Invalid exit argument (line $lineno)" ;;
|
7) echo "Curl: Failed to connect to host (connection refused)" ;;
|
||||||
130) msg_error "Script aborted by user (CTRL+C)" ;;
|
22) echo "Curl: HTTP error (404/403 etc)" ;;
|
||||||
143) msg_error "Script terminated by SIGTERM" ;;
|
28) echo "Curl: Operation timeout" ;;
|
||||||
*) msg_error "Script failed at line $lineno with exit code $exit_code" ;;
|
*) echo "Curl: Unknown error ($code)" ;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
exit "$exit_code"
|
|
||||||
}
|
|
||||||
|
|
||||||
on_exit() {
|
|
||||||
cleanup_spinner || true
|
|
||||||
[[ "${VERBOSE:-no}" == "yes" ]] && msg_info "Script exited"
|
|
||||||
}
|
|
||||||
|
|
||||||
on_interrupt() {
|
|
||||||
msg_error "Interrupted by user (CTRL+C)"
|
|
||||||
exit 130
|
|
||||||
}
|
|
||||||
|
|
||||||
on_terminate() {
|
|
||||||
msg_error "Terminated by signal (TERM)"
|
|
||||||
exit 143
|
|
||||||
}
|
|
||||||
|
|
||||||
setup_trap_abort_handling() {
|
|
||||||
trap '__handle_signal_abort SIGINT' SIGINT
|
|
||||||
trap '__handle_signal_abort SIGTERM' SIGTERM
|
|
||||||
trap '__handle_unexpected_error $?' ERR
|
|
||||||
}
|
|
||||||
|
|
||||||
__handle_signal_abort() {
|
|
||||||
local signal="$1"
|
|
||||||
echo
|
|
||||||
[ -n "${SPINNER_PID:-}" ] && kill "$SPINNER_PID" 2>/dev/null && wait "$SPINNER_PID" 2>/dev/null
|
|
||||||
|
|
||||||
case "$signal" in
|
|
||||||
SIGINT)
|
|
||||||
msg_error "Script aborted by user (CTRL+C)"
|
|
||||||
exit 130
|
|
||||||
;;
|
;;
|
||||||
SIGTERM)
|
wget)
|
||||||
msg_error "Script terminated (SIGTERM)"
|
echo "Wget failed – URL unreachable or permission denied"
|
||||||
exit 143
|
|
||||||
;;
|
;;
|
||||||
*)
|
systemctl)
|
||||||
msg_error "Script interrupted (unknown signal: $signal)"
|
echo "Systemd unit failure – check service name and permissions"
|
||||||
exit 1
|
|
||||||
;;
|
;;
|
||||||
|
jq)
|
||||||
|
echo "jq parse error – malformed JSON or missing key"
|
||||||
|
;;
|
||||||
|
mariadb | mysql)
|
||||||
|
echo "MySQL/MariaDB command failed – check credentials or DB"
|
||||||
|
;;
|
||||||
|
unzip)
|
||||||
|
echo "unzip failed – corrupt file or missing permission"
|
||||||
|
;;
|
||||||
|
tar)
|
||||||
|
echo "tar failed – invalid format or missing binary"
|
||||||
|
;;
|
||||||
|
node | npm | pnpm | yarn)
|
||||||
|
echo "Node tool failed – check version compatibility or package.json"
|
||||||
|
;;
|
||||||
|
*) echo "" ;;
|
||||||
esac
|
esac
|
||||||
}
|
}
|
||||||
|
|
||||||
__handle_unexpected_error() {
|
catch_errors() {
|
||||||
local exit_code="$1"
|
set -Eeuo pipefail
|
||||||
echo
|
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
|
||||||
[ -n "${SPINNER_PID:-}" ] && kill "$SPINNER_PID" 2>/dev/null && wait "$SPINNER_PID" 2>/dev/null
|
|
||||||
|
|
||||||
case "$exit_code" in
|
|
||||||
1)
|
|
||||||
msg_error "Generic error occurred (exit code 1)"
|
|
||||||
;;
|
|
||||||
2)
|
|
||||||
msg_error "Misuse of shell builtins (exit code 2)"
|
|
||||||
;;
|
|
||||||
126)
|
|
||||||
msg_error "Command invoked cannot execute (exit code 126)"
|
|
||||||
;;
|
|
||||||
127)
|
|
||||||
msg_error "Command not found (exit code 127)"
|
|
||||||
;;
|
|
||||||
128)
|
|
||||||
msg_error "Invalid exit argument (exit code 128)"
|
|
||||||
;;
|
|
||||||
130)
|
|
||||||
msg_error "Script aborted by user (CTRL+C)"
|
|
||||||
;;
|
|
||||||
143)
|
|
||||||
msg_error "Script terminated by SIGTERM"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
msg_error "Unexpected error occurred (exit code $exit_code)"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
exit "$exit_code"
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
@ -153,6 +80,13 @@ color() {
|
|||||||
CL=$(echo "\033[m")
|
CL=$(echo "\033[m")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Special for spinner and colorized output via printf
|
||||||
|
color_spinner() {
|
||||||
|
CS_YW=$'\033[33m'
|
||||||
|
CS_YWB=$'\033[93m'
|
||||||
|
CS_CL=$'\033[m'
|
||||||
|
}
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
# Defines formatting helpers like tab, bold, and line reset sequences.
|
# Defines formatting helpers like tab, bold, and line reset sequences.
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
@ -196,6 +130,7 @@ icons() {
|
|||||||
ADVANCED="${TAB}🧩${TAB}${CL}"
|
ADVANCED="${TAB}🧩${TAB}${CL}"
|
||||||
FUSE="${TAB}🗂️${TAB}${CL}"
|
FUSE="${TAB}🗂️${TAB}${CL}"
|
||||||
HOURGLASS="${TAB}⏳${TAB}"
|
HOURGLASS="${TAB}⏳${TAB}"
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
@ -227,7 +162,7 @@ silent() {
|
|||||||
# Function to download & save header files
|
# Function to download & save header files
|
||||||
get_header() {
|
get_header() {
|
||||||
local app_name=$(echo "${APP,,}" | tr -d ' ')
|
local app_name=$(echo "${APP,,}" | tr -d ' ')
|
||||||
local app_type=${APP_TYPE:-ct} # Default 'ct'
|
local app_type=${APP_TYPE:-ct}
|
||||||
local header_url="https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/${app_type}/headers/${app_name}"
|
local header_url="https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/${app_type}/headers/${app_name}"
|
||||||
local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}"
|
local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}"
|
||||||
|
|
||||||
@ -257,77 +192,39 @@ header_info() {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
ensure_tput() {
|
||||||
# Performs a curl request with retry logic and inline feedback.
|
if ! command -v tput >/dev/null 2>&1; then
|
||||||
# ------------------------------------------------------------------------------
|
if grep -qi 'alpine' /etc/os-release; then
|
||||||
|
apk add --no-cache ncurses >/dev/null 2>&1
|
||||||
run_curl() {
|
elif command -v apt-get >/dev/null 2>&1; then
|
||||||
if [ "$VERBOSE" = "no" ]; then
|
apt-get update -qq >/dev/null
|
||||||
$STD curl "$@"
|
apt-get install -y -qq ncurses-bin >/dev/null 2>&1
|
||||||
else
|
fi
|
||||||
curl "$@"
|
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
curl_handler() {
|
is_alpine() {
|
||||||
set +e
|
local os_id="${var_os:-${PCT_OSTYPE:-}}"
|
||||||
trap 'set -e' RETURN
|
|
||||||
local args=()
|
|
||||||
local url=""
|
|
||||||
local max_retries=3
|
|
||||||
local delay=2
|
|
||||||
local attempt=1
|
|
||||||
local exit_code
|
|
||||||
local has_output_file=false
|
|
||||||
local result=""
|
|
||||||
|
|
||||||
# Parse arguments
|
if [[ -z "$os_id" && -f /etc/os-release ]]; then
|
||||||
for arg in "$@"; do
|
os_id="$(
|
||||||
if [[ "$arg" != -* && -z "$url" ]]; then
|
. /etc/os-release 2>/dev/null
|
||||||
url="$arg"
|
echo "${ID:-}"
|
||||||
fi
|
)"
|
||||||
[[ "$arg" == "-o" || "$arg" == --output ]] && has_output_file=true
|
|
||||||
args+=("$arg")
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ -z "$url" ]]; then
|
|
||||||
msg_error "No valid URL or option entered for curl_handler"
|
|
||||||
return 1
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
$STD msg_info "Fetching: $url"
|
[[ "$os_id" == "alpine" ]]
|
||||||
|
}
|
||||||
|
|
||||||
while [[ $attempt -le $max_retries ]]; do
|
is_verbose_mode() {
|
||||||
if $has_output_file; then
|
local verbose="${VERBOSE:-${var_verbose:-no}}"
|
||||||
$STD run_curl "${args[@]}"
|
local tty_status
|
||||||
exit_code=$?
|
if [[ -t 2 ]]; then
|
||||||
else
|
tty_status="interactive"
|
||||||
result=$(run_curl "${args[@]}")
|
else
|
||||||
exit_code=$?
|
tty_status="not-a-tty"
|
||||||
fi
|
fi
|
||||||
|
[[ "$verbose" != "no" || ! -t 2 ]]
|
||||||
if [[ $exit_code -eq 0 ]]; then
|
|
||||||
$STD msg_ok "Fetched: $url"
|
|
||||||
$has_output_file || printf '%s' "$result"
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ((attempt >= max_retries)); then
|
|
||||||
# Read error log if it exists
|
|
||||||
if [ -s /tmp/curl_error.log ]; then
|
|
||||||
local curl_stderr
|
|
||||||
curl_stderr=$(</tmp/curl_error.log)
|
|
||||||
rm -f /tmp/curl_error.log
|
|
||||||
fi
|
|
||||||
__curl_err_handler "$exit_code" "$url" "${curl_stderr:-}"
|
|
||||||
exit
|
|
||||||
fi
|
|
||||||
|
|
||||||
$STD printf "\r\033[K${INFO}${YW}Retry $attempt/$max_retries in ${delay}s...${CL}" >&2
|
|
||||||
sleep "$delay"
|
|
||||||
((attempt++))
|
|
||||||
done
|
|
||||||
set -e
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
@ -372,144 +269,93 @@ fatal() {
|
|||||||
kill -INT $$
|
kill -INT $$
|
||||||
}
|
}
|
||||||
|
|
||||||
# Ensure POSIX compatibility across Alpine and Debian/Ubuntu
|
spinner() {
|
||||||
# === Spinner Start ===
|
local chars=(⠋ ⠙ ⠹ ⠸ ⠼ ⠴ ⠦ ⠧ ⠇ ⠏)
|
||||||
# Trap cleanup on various signals
|
local i=0
|
||||||
trap 'cleanup_spinner' EXIT INT TERM HUP
|
while true; do
|
||||||
|
local index=$((i++ % ${#chars[@]}))
|
||||||
spinner_frames=('⠋' '⠙' '⠹' '⠸' '⠼' '⠴' '⠦' '⠧' '⠇' '⠏')
|
printf "\r\033[2K%s %b" "${CS_YWB}${chars[$index]}${CS_CL}" "${CS_YWB}${SPINNER_MSG:-}${CS_CL}"
|
||||||
|
sleep 0.1
|
||||||
# === Spinner Start ===
|
done
|
||||||
start_spinner() {
|
}
|
||||||
local msg="$1"
|
|
||||||
local spin_i=0
|
clear_line() {
|
||||||
local interval=0.1
|
tput cr 2>/dev/null || echo -en "\r"
|
||||||
|
tput el 2>/dev/null || echo -en "\033[K"
|
||||||
stop_spinner
|
|
||||||
SPINNER_MSG="$msg"
|
|
||||||
SPINNER_ACTIVE=1
|
|
||||||
|
|
||||||
{
|
|
||||||
while [[ "$SPINNER_ACTIVE" -eq 1 ]]; do
|
|
||||||
if [[ -t 2 ]]; then
|
|
||||||
printf "\r\e[2K%s %b" "${TAB}${spinner_frames[spin_i]}${TAB}" "${YW}${SPINNER_MSG}${CL}" >&2
|
|
||||||
else
|
|
||||||
printf "%s...\n" "$SPINNER_MSG" >&2
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
spin_i=$(((spin_i + 1) % ${#spinner_frames[@]}))
|
|
||||||
sleep "$interval"
|
|
||||||
done
|
|
||||||
} &
|
|
||||||
|
|
||||||
local pid=$!
|
|
||||||
if ps -p "$pid" >/dev/null 2>&1; then
|
|
||||||
SPINNER_PID="$pid"
|
|
||||||
else
|
|
||||||
SPINNER_ACTIVE=0
|
|
||||||
SPINNER_PID=""
|
|
||||||
fi
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# === Spinner Stop ===
|
|
||||||
stop_spinner() {
|
stop_spinner() {
|
||||||
if [[ "$SPINNER_ACTIVE" -eq 1 && -n "$SPINNER_PID" ]]; then
|
local pid="${SPINNER_PID:-}"
|
||||||
SPINNER_ACTIVE=0
|
[[ -z "$pid" && -f /tmp/.spinner.pid ]] && pid=$(</tmp/.spinner.pid)
|
||||||
|
|
||||||
if kill -0 "$SPINNER_PID" 2>/dev/null; then
|
if [[ -n "$pid" && "$pid" =~ ^[0-9]+$ ]]; then
|
||||||
kill "$SPINNER_PID" 2>/dev/null || true
|
if kill "$pid" 2>/dev/null; then
|
||||||
for _ in $(seq 1 10); do
|
sleep 0.05
|
||||||
sleep 0.05
|
kill -9 "$pid" 2>/dev/null || true
|
||||||
kill -0 "$SPINNER_PID" 2>/dev/null || break
|
wait "$pid" 2>/dev/null || true
|
||||||
done
|
|
||||||
fi
|
fi
|
||||||
|
rm -f /tmp/.spinner.pid
|
||||||
if [[ "$SPINNER_PID" =~ ^[0-9]+$ ]]; then
|
|
||||||
ps -p "$SPINNER_PID" -o pid= >/dev/null 2>&1 && wait "$SPINNER_PID" 2>/dev/null || true
|
|
||||||
fi
|
|
||||||
|
|
||||||
printf "\r\e[2K" >&2
|
|
||||||
SPINNER_PID=""
|
|
||||||
fi
|
fi
|
||||||
}
|
|
||||||
|
|
||||||
cleanup_spinner() {
|
unset SPINNER_PID SPINNER_MSG
|
||||||
stop_spinner
|
stty sane 2>/dev/null || true
|
||||||
}
|
}
|
||||||
|
|
||||||
msg_info() {
|
msg_info() {
|
||||||
local msg="$1"
|
local msg="$1"
|
||||||
[[ -z "$msg" || -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return
|
[[ -z "$msg" ]] && return
|
||||||
|
|
||||||
|
if ! declare -p MSG_INFO_SHOWN &>/dev/null || ! declare -A MSG_INFO_SHOWN &>/dev/null; then
|
||||||
|
declare -gA MSG_INFO_SHOWN=()
|
||||||
|
fi
|
||||||
|
[[ -n "${MSG_INFO_SHOWN["$msg"]+x}" ]] && return
|
||||||
MSG_INFO_SHOWN["$msg"]=1
|
MSG_INFO_SHOWN["$msg"]=1
|
||||||
|
|
||||||
stop_spinner
|
stop_spinner
|
||||||
|
SPINNER_MSG="$msg"
|
||||||
|
|
||||||
if [[ "${VERBOSE:-no}" == "no" && -t 2 ]]; then
|
if is_verbose_mode || is_alpine; then
|
||||||
start_spinner "$msg"
|
local HOURGLASS="${TAB}⏳${TAB}"
|
||||||
else
|
|
||||||
printf "\r\e[2K%s %b" "$HOURGLASS" "${YW}${msg}${CL}" >&2
|
printf "\r\e[2K%s %b" "$HOURGLASS" "${YW}${msg}${CL}" >&2
|
||||||
|
return
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
color_spinner
|
||||||
|
spinner &
|
||||||
|
SPINNER_PID=$!
|
||||||
|
echo "$SPINNER_PID" >/tmp/.spinner.pid
|
||||||
|
disown "$SPINNER_PID" 2>/dev/null || true
|
||||||
}
|
}
|
||||||
|
|
||||||
msg_ok() {
|
msg_ok() {
|
||||||
local msg="$1"
|
local msg="$1"
|
||||||
[[ -z "$msg" ]] && return
|
[[ -z "$msg" ]] && return
|
||||||
stop_spinner
|
stop_spinner
|
||||||
printf "\r\e[2K%s %b\n" "$CM" "${GN}${msg}${CL}" >&2
|
clear_line
|
||||||
|
printf "%s %b\n" "$CM" "${GN}${msg}${CL}" >&2
|
||||||
unset MSG_INFO_SHOWN["$msg"]
|
unset MSG_INFO_SHOWN["$msg"]
|
||||||
}
|
}
|
||||||
|
|
||||||
msg_error() {
|
msg_error() {
|
||||||
local msg="$1"
|
|
||||||
[[ -z "$msg" ]] && return
|
|
||||||
stop_spinner
|
stop_spinner
|
||||||
printf "\r\e[2K%s %b\n" "$CROSS" "${RD}${msg}${CL}" >&2
|
local msg="$1"
|
||||||
|
echo -e "${BFR:-} ${CROSS:-✖️} ${RD}${msg}${CL}"
|
||||||
}
|
}
|
||||||
|
|
||||||
msg_warn() {
|
msg_warn() {
|
||||||
local msg="$1"
|
|
||||||
[[ -z "$msg" ]] && return
|
|
||||||
stop_spinner
|
stop_spinner
|
||||||
printf "\r\e[2K%s %b\n" "$INFO" "${YWB}${msg}${CL}" >&2
|
local msg="$1"
|
||||||
unset MSG_INFO_SHOWN["$msg"]
|
echo -e "${BFR:-} ${INFO:-ℹ️} ${YWB}${msg}${CL}"
|
||||||
}
|
}
|
||||||
|
|
||||||
msg_custom() {
|
msg_custom() {
|
||||||
local symbol="${1:-"[*]"}"
|
local symbol="${1:-"[*]"}"
|
||||||
local color="${2:-"\e[36m"}" # Default: Cyan
|
local color="${2:-"\e[36m"}"
|
||||||
local msg="${3:-}"
|
local msg="${3:-}"
|
||||||
|
|
||||||
[[ -z "$msg" ]] && return
|
[[ -z "$msg" ]] && return
|
||||||
stop_spinner 2>/dev/null || true
|
stop_spinner
|
||||||
printf "\r\e[2K%s %b\n" "$symbol" "${color}${msg}${CL:-\e[0m}" >&2
|
echo -e "${BFR:-} ${symbol} ${color}${msg}${CL:-\e[0m}"
|
||||||
}
|
printf "\r\033[K\e[?25h\n"
|
||||||
|
|
||||||
msg_progress() {
|
|
||||||
local current="$1"
|
|
||||||
local total="$2"
|
|
||||||
local label="$3"
|
|
||||||
local width=40
|
|
||||||
local filled percent bar empty
|
|
||||||
local fill_char="#"
|
|
||||||
local empty_char="-"
|
|
||||||
|
|
||||||
if ! [[ "$current" =~ ^[0-9]+$ ]] || ! [[ "$total" =~ ^[0-9]+$ ]] || [[ "$total" -eq 0 ]]; then
|
|
||||||
printf "\r\e[2K%s %b\n" "$CROSS" "${RD}Invalid progress input${CL}" >&2
|
|
||||||
return
|
|
||||||
fi
|
|
||||||
|
|
||||||
percent=$(((current * 100) / total))
|
|
||||||
filled=$(((current * width) / total))
|
|
||||||
empty=$((width - filled))
|
|
||||||
|
|
||||||
bar=$(printf "%${filled}s" | tr ' ' "$fill_char")
|
|
||||||
bar+=$(printf "%${empty}s" | tr ' ' "$empty_char")
|
|
||||||
|
|
||||||
printf "\r\e[2K%s [%s] %3d%% %s" "${TAB}" "$bar" "$percent" "$label" >&2
|
|
||||||
|
|
||||||
if [[ "$current" -eq "$total" ]]; then
|
|
||||||
printf "\n" >&2
|
|
||||||
fi
|
|
||||||
}
|
}
|
||||||
|
|
||||||
run_container_safe() {
|
run_container_safe() {
|
||||||
@ -560,3 +406,5 @@ check_or_create_swap() {
|
|||||||
return 1
|
return 1
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
trap 'stop_spinner' EXIT INT TERM
|
||||||
|
@ -21,36 +21,67 @@ fi
|
|||||||
# This sets error handling options and defines the error_handler function to handle errors
|
# This sets error handling options and defines the error_handler function to handle errors
|
||||||
set -Eeuo pipefail
|
set -Eeuo pipefail
|
||||||
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
|
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
|
||||||
|
trap on_exit EXIT
|
||||||
|
trap on_interrupt INT
|
||||||
|
trap on_terminate TERM
|
||||||
|
|
||||||
|
function on_exit() {
|
||||||
|
local exit_code="$?"
|
||||||
|
[[ -n "${lockfile:-}" && -e "$lockfile" ]] && rm -f "$lockfile"
|
||||||
|
exit "$exit_code"
|
||||||
|
}
|
||||||
|
|
||||||
# This function handles errors
|
|
||||||
function error_handler() {
|
function error_handler() {
|
||||||
printf "\e[?25h"
|
|
||||||
local exit_code="$?"
|
local exit_code="$?"
|
||||||
local line_number="$1"
|
local line_number="$1"
|
||||||
local command="$2"
|
local command="$2"
|
||||||
local error_message="${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}"
|
printf "\e[?25h"
|
||||||
echo -e "\n$error_message\n"
|
echo -e "\n${RD}[ERROR]${CL} in line ${RD}$line_number${CL}: exit code ${RD}$exit_code${CL}: while executing command ${YW}$command${CL}\n"
|
||||||
exit 200
|
exit "$exit_code"
|
||||||
|
}
|
||||||
|
|
||||||
|
function on_interrupt() {
|
||||||
|
echo -e "\n${RD}Interrupted by user (SIGINT)${CL}"
|
||||||
|
exit 130
|
||||||
|
}
|
||||||
|
|
||||||
|
function on_terminate() {
|
||||||
|
echo -e "\n${RD}Terminated by signal (SIGTERM)${CL}"
|
||||||
|
exit 143
|
||||||
|
}
|
||||||
|
|
||||||
|
function check_storage_support() {
|
||||||
|
local CONTENT="$1"
|
||||||
|
local -a VALID_STORAGES=()
|
||||||
|
|
||||||
|
while IFS= read -r line; do
|
||||||
|
local STORAGE=$(awk '{print $1}' <<<"$line")
|
||||||
|
[[ "$STORAGE" == "storage" || -z "$STORAGE" ]] && continue
|
||||||
|
VALID_STORAGES+=("$STORAGE")
|
||||||
|
done < <(pvesm status -content "$CONTENT" 2>/dev/null | awk 'NR>1')
|
||||||
|
|
||||||
|
[[ ${#VALID_STORAGES[@]} -gt 0 ]]
|
||||||
}
|
}
|
||||||
|
|
||||||
# This checks for the presence of valid Container Storage and Template Storage locations
|
# This checks for the presence of valid Container Storage and Template Storage locations
|
||||||
msg_info "Validating Storage"
|
msg_info "Validating Storage"
|
||||||
VALIDCT=$(pvesm status -content rootdir | awk 'NR>1')
|
if ! check_storage_support "rootdir"; then
|
||||||
if [ -z "$VALIDCT" ]; then
|
|
||||||
msg_error "Unable to detect a valid Container Storage location."
|
msg_error "No valid storage found for 'rootdir' (Container)."
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
VALIDTMP=$(pvesm status -content vztmpl | awk 'NR>1')
|
if ! check_storage_support "vztmpl"; then
|
||||||
if [ -z "$VALIDTMP" ]; then
|
|
||||||
msg_error "Unable to detect a valid Template Storage location."
|
msg_error "No valid storage found for 'vztmpl' (Template)."
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
msg_ok "Validated Storage (rootdir / vztmpl)."
|
||||||
|
|
||||||
# This function is used to select the storage class and determine the corresponding storage content type and label.
|
# This function is used to select the storage class and determine the corresponding storage content type and label.
|
||||||
function select_storage() {
|
function select_storage() {
|
||||||
local CLASS=$1
|
local CLASS=$1 CONTENT CONTENT_LABEL
|
||||||
local CONTENT
|
|
||||||
local CONTENT_LABEL
|
|
||||||
case $CLASS in
|
case $CLASS in
|
||||||
container)
|
container)
|
||||||
CONTENT='rootdir'
|
CONTENT='rootdir'
|
||||||
@ -60,51 +91,72 @@ function select_storage() {
|
|||||||
CONTENT='vztmpl'
|
CONTENT='vztmpl'
|
||||||
CONTENT_LABEL='Container template'
|
CONTENT_LABEL='Container template'
|
||||||
;;
|
;;
|
||||||
*) false || {
|
iso)
|
||||||
msg_error "Invalid storage class."
|
CONTENT='iso'
|
||||||
exit 201
|
CONTENT_LABEL='ISO image'
|
||||||
} ;;
|
;;
|
||||||
|
images)
|
||||||
|
CONTENT='images'
|
||||||
|
CONTENT_LABEL='VM Disk image'
|
||||||
|
;;
|
||||||
|
backup)
|
||||||
|
CONTENT='backup'
|
||||||
|
CONTENT_LABEL='Backup'
|
||||||
|
;;
|
||||||
|
snippets)
|
||||||
|
CONTENT='snippets'
|
||||||
|
CONTENT_LABEL='Snippets'
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
msg_error "Invalid storage class '$CLASS'"
|
||||||
|
return 1
|
||||||
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
# Collect storage options
|
local -a MENU
|
||||||
local -a MENU
|
local -A STORAGE_MAP
|
||||||
local MSG_MAX_LENGTH=0
|
local COL_WIDTH=0
|
||||||
|
|
||||||
while read -r TAG TYPE _ _ _ FREE _; do
|
while read -r TAG TYPE _ TOTAL USED FREE _; do
|
||||||
local TYPE_PADDED
|
[[ -n "$TAG" && -n "$TYPE" ]] || continue
|
||||||
local FREE_FMT
|
local DISPLAY="${TAG} (${TYPE})"
|
||||||
|
local USED_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$USED")
|
||||||
TYPE_PADDED=$(printf "%-10s" "$TYPE")
|
local FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.1f <<<"$FREE")
|
||||||
FREE_FMT=$(numfmt --to=iec --from-unit=K --format %.2f <<<"$FREE")B
|
local INFO="Free: ${FREE_FMT}B Used: ${USED_FMT}B"
|
||||||
local ITEM="Type: $TYPE_PADDED Free: $FREE_FMT"
|
STORAGE_MAP["$DISPLAY"]="$TAG"
|
||||||
|
MENU+=("$DISPLAY" "$INFO" "OFF")
|
||||||
((${#ITEM} + 2 > MSG_MAX_LENGTH)) && MSG_MAX_LENGTH=$((${#ITEM} + 2))
|
((${#DISPLAY} > COL_WIDTH)) && COL_WIDTH=${#DISPLAY}
|
||||||
|
|
||||||
MENU+=("$TAG" "$ITEM" "OFF")
|
|
||||||
done < <(pvesm status -content "$CONTENT" | awk 'NR>1')
|
done < <(pvesm status -content "$CONTENT" | awk 'NR>1')
|
||||||
|
|
||||||
local OPTION_COUNT=$((${#MENU[@]} / 3))
|
if [ ${#MENU[@]} -eq 0 ]; then
|
||||||
|
msg_error "No storage found for content type '$CONTENT'."
|
||||||
|
return 2
|
||||||
|
fi
|
||||||
|
|
||||||
# Auto-select if only one option available
|
if [ $((${#MENU[@]} / 3)) -eq 1 ]; then
|
||||||
if [[ "$OPTION_COUNT" -eq 1 ]]; then
|
STORAGE_RESULT="${STORAGE_MAP[${MENU[0]}]}"
|
||||||
echo "${MENU[0]}"
|
|
||||||
return 0
|
return 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Display selection menu
|
local WIDTH=$((COL_WIDTH + 42))
|
||||||
local STORAGE
|
while true; do
|
||||||
while [[ -z "${STORAGE:+x}" ]]; do
|
local DISPLAY_SELECTED=$(whiptail --backtitle "Proxmox VE Helper Scripts" \
|
||||||
STORAGE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Storage Pools" --radiolist \
|
--title "Storage Pools" \
|
||||||
"Select the storage pool to use for the ${CONTENT_LABEL,,}.\nUse the spacebar to make a selection.\n" \
|
--radiolist "Which storage pool for ${CONTENT_LABEL,,}?\n(Spacebar to select)" \
|
||||||
16 $((MSG_MAX_LENGTH + 23)) 6 \
|
16 "$WIDTH" 6 "${MENU[@]}" 3>&1 1>&2 2>&3)
|
||||||
"${MENU[@]}" 3>&1 1>&2 2>&3) || {
|
|
||||||
msg_error "Storage selection cancelled."
|
|
||||||
exit 202
|
|
||||||
}
|
|
||||||
done
|
|
||||||
|
|
||||||
echo "$STORAGE"
|
[[ $? -ne 0 ]] && return 3
|
||||||
|
|
||||||
|
if [[ -z "$DISPLAY_SELECTED" || -z "${STORAGE_MAP[$DISPLAY_SELECTED]+_}" ]]; then
|
||||||
|
whiptail --msgbox "No valid storage selected. Please try again." 8 58
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
STORAGE_RESULT="${STORAGE_MAP[$DISPLAY_SELECTED]}"
|
||||||
|
return 0
|
||||||
|
done
|
||||||
}
|
}
|
||||||
|
|
||||||
# Test if required variables are set
|
# Test if required variables are set
|
||||||
[[ "${CTID:-}" ]] || {
|
[[ "${CTID:-}" ]] || {
|
||||||
msg_error "You need to set 'CTID' variable."
|
msg_error "You need to set 'CTID' variable."
|
||||||
@ -129,13 +181,55 @@ if qm status "$CTID" &>/dev/null || pct status "$CTID" &>/dev/null; then
|
|||||||
exit 206
|
exit 206
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Get template storage
|
# DEFAULT_FILE="/usr/local/community-scripts/default_storage"
|
||||||
TEMPLATE_STORAGE=$(select_storage template)
|
# if [[ -f "$DEFAULT_FILE" ]]; then
|
||||||
msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
|
# source "$DEFAULT_FILE"
|
||||||
|
# if [[ -n "$TEMPLATE_STORAGE" && -n "$CONTAINER_STORAGE" ]]; then
|
||||||
|
# msg_info "Using default storage configuration from: $DEFAULT_FILE"
|
||||||
|
# msg_ok "Template Storage: ${BL}$TEMPLATE_STORAGE${CL} ${GN}|${CL} Container Storage: ${BL}$CONTAINER_STORAGE${CL}"
|
||||||
|
# else
|
||||||
|
# msg_warn "Default storage file exists but is incomplete – falling back to manual selection"
|
||||||
|
# TEMPLATE_STORAGE=$(select_storage template)
|
||||||
|
# msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
|
||||||
|
# CONTAINER_STORAGE=$(select_storage container)
|
||||||
|
# msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
|
||||||
|
# fi
|
||||||
|
# else
|
||||||
|
# # TEMPLATE STORAGE SELECTION
|
||||||
|
# # Template Storage
|
||||||
|
# while true; do
|
||||||
|
# TEMPLATE_STORAGE=$(select_storage template)
|
||||||
|
# if [[ -n "$TEMPLATE_STORAGE" ]]; then
|
||||||
|
# msg_ok "Using ${BL}$TEMPLATE_STORAGE${CL} ${GN}for Template Storage."
|
||||||
|
# break
|
||||||
|
# fi
|
||||||
|
# msg_warn "No valid template storage selected. Please try again."
|
||||||
|
# done
|
||||||
|
|
||||||
# Get container storage
|
# while true; do
|
||||||
CONTAINER_STORAGE=$(select_storage container)
|
# CONTAINER_STORAGE=$(select_storage container)
|
||||||
msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
|
# if [[ -n "$CONTAINER_STORAGE" ]]; then
|
||||||
|
# msg_ok "Using ${BL}$CONTAINER_STORAGE${CL} ${GN}for Container Storage."
|
||||||
|
# break
|
||||||
|
# fi
|
||||||
|
# msg_warn "No valid container storage selected. Please try again."
|
||||||
|
# done
|
||||||
|
|
||||||
|
# fi
|
||||||
|
|
||||||
|
while true; do
|
||||||
|
if select_storage template; then
|
||||||
|
TEMPLATE_STORAGE="$STORAGE_RESULT"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
while true; do
|
||||||
|
if select_storage container; then
|
||||||
|
CONTAINER_STORAGE="$STORAGE_RESULT"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
# Check free space on selected container storage
|
# Check free space on selected container storage
|
||||||
STORAGE_FREE=$(pvesm status | awk -v s="$CONTAINER_STORAGE" '$1 == s { print $6 }')
|
STORAGE_FREE=$(pvesm status | awk -v s="$CONTAINER_STORAGE" '$1 == s { print $6 }')
|
||||||
@ -204,7 +298,7 @@ if ! pveam list "$TEMPLATE_STORAGE" | grep -q "$TEMPLATE" || ! zstdcat "$TEMPLAT
|
|||||||
done
|
done
|
||||||
fi
|
fi
|
||||||
|
|
||||||
msg_ok "LXC Template '$TEMPLATE' is ready to use."
|
msg_info "Creating LXC Container"
|
||||||
# Check and fix subuid/subgid
|
# Check and fix subuid/subgid
|
||||||
grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid
|
grep -q "root:100000:65536" /etc/subuid || echo "root:100000:65536" >>/etc/subuid
|
||||||
grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid
|
grep -q "root:100000:65536" /etc/subgid || echo "root:100000:65536" >>/etc/subgid
|
||||||
@ -215,12 +309,15 @@ PCT_OPTIONS=(${PCT_OPTIONS[@]:-${DEFAULT_PCT_OPTIONS[@]}})
|
|||||||
|
|
||||||
# Secure creation of the LXC container with lock and template check
|
# Secure creation of the LXC container with lock and template check
|
||||||
lockfile="/tmp/template.${TEMPLATE}.lock"
|
lockfile="/tmp/template.${TEMPLATE}.lock"
|
||||||
exec 9>"$lockfile"
|
exec 9>"$lockfile" >/dev/null 2>&1 || {
|
||||||
|
msg_error "Failed to create lock file '$lockfile'."
|
||||||
|
exit 200
|
||||||
|
}
|
||||||
flock -w 60 9 || {
|
flock -w 60 9 || {
|
||||||
msg_error "Timeout while waiting for template lock"
|
msg_error "Timeout while waiting for template lock"
|
||||||
exit 211
|
exit 211
|
||||||
}
|
}
|
||||||
msg_info "Creating LXC Container"
|
|
||||||
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
|
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
|
||||||
msg_error "Container creation failed. Checking if template is corrupted or incomplete."
|
msg_error "Container creation failed. Checking if template is corrupted or incomplete."
|
||||||
|
|
||||||
@ -252,16 +349,23 @@ if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[
|
|||||||
sleep 1 # I/O-Sync-Delay
|
sleep 1 # I/O-Sync-Delay
|
||||||
|
|
||||||
msg_ok "Re-downloaded LXC Template"
|
msg_ok "Re-downloaded LXC Template"
|
||||||
|
fi
|
||||||
|
|
||||||
if ! pct create "$CTID" "${TEMPLATE_STORAGE}:vztmpl/${TEMPLATE}" "${PCT_OPTIONS[@]}" &>/dev/null; then
|
if ! pct list | awk '{print $1}' | grep -qx "$CTID"; then
|
||||||
msg_error "Container creation failed after re-downloading template."
|
msg_error "Container ID $CTID not listed in 'pct list' – unexpected failure."
|
||||||
exit 200
|
exit 215
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! grep -q '^rootfs:' "/etc/pve/lxc/$CTID.conf"; then
|
||||||
|
msg_error "RootFS entry missing in container config – storage not correctly assigned."
|
||||||
|
exit 216
|
||||||
|
fi
|
||||||
|
|
||||||
|
if grep -q '^hostname:' "/etc/pve/lxc/$CTID.conf"; then
|
||||||
|
CT_HOSTNAME=$(grep '^hostname:' "/etc/pve/lxc/$CTID.conf" | awk '{print $2}')
|
||||||
|
if [[ ! "$CT_HOSTNAME" =~ ^[a-z0-9-]+$ ]]; then
|
||||||
|
msg_warn "Hostname '$CT_HOSTNAME' contains invalid characters – may cause issues with networking or DNS."
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if ! pct status "$CTID" &>/dev/null; then
|
|
||||||
msg_error "Container not found after pct create – assuming failure."
|
|
||||||
exit 210
|
|
||||||
fi
|
|
||||||
|
|
||||||
msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created."
|
msg_ok "LXC Container ${BL}$CTID${CL} ${GN}was successfully created."
|
||||||
|
@ -62,9 +62,11 @@ setting_up_container() {
|
|||||||
rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
|
rm -rf /usr/lib/python3.*/EXTERNALLY-MANAGED
|
||||||
systemctl disable -q --now systemd-networkd-wait-online.service
|
systemctl disable -q --now systemd-networkd-wait-online.service
|
||||||
msg_ok "Set up Container OS"
|
msg_ok "Set up Container OS"
|
||||||
msg_custom "${CM}" "${GN}" "Network Connected: ${BL}$(hostname -I)"
|
#msg_custom "${CM}" "${GN}" "Network Connected: ${BL}$(hostname -I)"
|
||||||
|
msg_ok "Network Connected: ${BL}$(hostname -I)"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
|
||||||
# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
|
# This function checks the network connection by pinging a known IP address and prompts the user to continue if the internet is not connected
|
||||||
network_check() {
|
network_check() {
|
||||||
set +e
|
set +e
|
||||||
@ -72,6 +74,7 @@ network_check() {
|
|||||||
ipv4_connected=false
|
ipv4_connected=false
|
||||||
ipv6_connected=false
|
ipv6_connected=false
|
||||||
sleep 1
|
sleep 1
|
||||||
|
|
||||||
# Check IPv4 connectivity to Google, Cloudflare & Quad9 DNS servers.
|
# Check IPv4 connectivity to Google, Cloudflare & Quad9 DNS servers.
|
||||||
if ping -c 1 -W 1 1.1.1.1 &>/dev/null || ping -c 1 -W 1 8.8.8.8 &>/dev/null || ping -c 1 -W 1 9.9.9.9 &>/dev/null; then
|
if ping -c 1 -W 1 1.1.1.1 &>/dev/null || ping -c 1 -W 1 8.8.8.8 &>/dev/null || ping -c 1 -W 1 9.9.9.9 &>/dev/null; then
|
||||||
msg_ok "IPv4 Internet Connected"
|
msg_ok "IPv4 Internet Connected"
|
||||||
@ -100,25 +103,26 @@ network_check() {
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# DNS resolution checks for GitHub-related domains (IPv4 and/or IPv6)
|
# DNS resolution checks for GitHub-related domains (IPv4 and/or IPv6)
|
||||||
GITHUB_HOSTS=("github.com" "raw.githubusercontent.com" "api.github.com")
|
GIT_HOSTS=("github.com" "raw.githubusercontent.com" "api.github.com" "git.community-scripts.org")
|
||||||
GITHUB_STATUS="GitHub DNS:"
|
GIT_STATUS="Git DNS:"
|
||||||
DNS_FAILED=false
|
DNS_FAILED=false
|
||||||
|
|
||||||
for HOST in "${GITHUB_HOSTS[@]}"; do
|
for HOST in "${GIT_HOSTS[@]}"; do
|
||||||
RESOLVEDIP=$(getent hosts "$HOST" | awk '{ print $1 }' | grep -E '(^([0-9]{1,3}\.){3}[0-9]{1,3}$)|(^[a-fA-F0-9:]+$)' | head -n1)
|
RESOLVEDIP=$(getent hosts "$HOST" | awk '{ print $1 }' | grep -E '(^([0-9]{1,3}\.){3}[0-9]{1,3}$)|(^[a-fA-F0-9:]+$)' | head -n1)
|
||||||
if [[ -z "$RESOLVEDIP" ]]; then
|
if [[ -z "$RESOLVEDIP" ]]; then
|
||||||
GITHUB_STATUS+="$HOST:($DNSFAIL)"
|
GIT_STATUS+="$HOST:($DNSFAIL)"
|
||||||
DNS_FAILED=true
|
DNS_FAILED=true
|
||||||
else
|
else
|
||||||
GITHUB_STATUS+=" $HOST:($DNSOK)"
|
GIT_STATUS+=" $HOST:($DNSOK)"
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
||||||
if [[ "$DNS_FAILED" == true ]]; then
|
if [[ "$DNS_FAILED" == true ]]; then
|
||||||
fatal "$GITHUB_STATUS"
|
fatal "$GIT_STATUS"
|
||||||
else
|
else
|
||||||
msg_ok "$GITHUB_STATUS"
|
msg_ok "$GIT_STATUS"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
set -e
|
set -e
|
||||||
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
|
trap 'error_handler $LINENO "$BASH_COMMAND"' ERR
|
||||||
}
|
}
|
||||||
|
@ -239,10 +239,14 @@ setup_mariadb() {
|
|||||||
DISTRO_CODENAME="$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)"
|
DISTRO_CODENAME="$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)"
|
||||||
CURRENT_OS="$(awk -F= '/^ID=/{print $2}' /etc/os-release)"
|
CURRENT_OS="$(awk -F= '/^ID=/{print $2}' /etc/os-release)"
|
||||||
|
|
||||||
|
if ! curl -fsI http://mirror.mariadb.org/repo/ >/dev/null; then
|
||||||
|
msg_error "MariaDB mirror not reachable"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
msg_info "Setting up MariaDB $MARIADB_VERSION"
|
msg_info "Setting up MariaDB $MARIADB_VERSION"
|
||||||
# grab dynamic latest LTS version
|
# grab dynamic latest LTS version
|
||||||
if [[ "$MARIADB_VERSION" == "latest" ]]; then
|
if [[ "$MARIADB_VERSION" == "latest" ]]; then
|
||||||
$STD msg_info "Resolving latest GA MariaDB version"
|
|
||||||
MARIADB_VERSION=$(curl -fsSL http://mirror.mariadb.org/repo/ |
|
MARIADB_VERSION=$(curl -fsSL http://mirror.mariadb.org/repo/ |
|
||||||
grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
|
grep -Eo '[0-9]+\.[0-9]+\.[0-9]+/' |
|
||||||
grep -vE 'rc/|rolling/' |
|
grep -vE 'rc/|rolling/' |
|
||||||
@ -253,7 +257,6 @@ setup_mariadb() {
|
|||||||
msg_error "Could not determine latest GA MariaDB version"
|
msg_error "Could not determine latest GA MariaDB version"
|
||||||
return 1
|
return 1
|
||||||
fi
|
fi
|
||||||
$STD msg_ok "Latest GA MariaDB version is $MARIADB_VERSION"
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
local CURRENT_VERSION=""
|
local CURRENT_VERSION=""
|
||||||
@ -278,7 +281,6 @@ setup_mariadb() {
|
|||||||
$STD msg_info "Setup MariaDB $MARIADB_VERSION"
|
$STD msg_info "Setup MariaDB $MARIADB_VERSION"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
$STD msg_info "Setting up MariaDB Repository"
|
|
||||||
curl -fsSL "https://mariadb.org/mariadb_release_signing_key.asc" |
|
curl -fsSL "https://mariadb.org/mariadb_release_signing_key.asc" |
|
||||||
gpg --dearmor -o /etc/apt/trusted.gpg.d/mariadb.gpg
|
gpg --dearmor -o /etc/apt/trusted.gpg.d/mariadb.gpg
|
||||||
|
|
||||||
@ -432,6 +434,13 @@ function setup_php() {
|
|||||||
$STD apt-get update
|
$STD apt-get update
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
for pkg in $MODULE_LIST; do
|
||||||
|
if ! apt-cache show "$pkg" >/dev/null 2>&1; then
|
||||||
|
msg_error "Package not found: $pkg"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
local MODULE_LIST="php${PHP_VERSION}"
|
local MODULE_LIST="php${PHP_VERSION}"
|
||||||
IFS=',' read -ra MODULES <<<"$COMBINED_MODULES"
|
IFS=',' read -ra MODULES <<<"$COMBINED_MODULES"
|
||||||
for mod in "${MODULES[@]}"; do
|
for mod in "${MODULES[@]}"; do
|
||||||
@ -441,6 +450,10 @@ function setup_php() {
|
|||||||
if [[ "$PHP_FPM" == "YES" ]]; then
|
if [[ "$PHP_FPM" == "YES" ]]; then
|
||||||
MODULE_LIST+=" php${PHP_VERSION}-fpm"
|
MODULE_LIST+=" php${PHP_VERSION}-fpm"
|
||||||
fi
|
fi
|
||||||
|
if [[ "$PHP_APACHE" == "YES" ]]; then
|
||||||
|
$STD apt-get install -y apache2
|
||||||
|
$STD systemctl restart apache2 || true
|
||||||
|
fi
|
||||||
|
|
||||||
if [[ "$PHP_APACHE" == "YES" ]] && [[ -n "$CURRENT_PHP" ]]; then
|
if [[ "$PHP_APACHE" == "YES" ]] && [[ -n "$CURRENT_PHP" ]]; then
|
||||||
if [[ -f /etc/apache2/mods-enabled/php${CURRENT_PHP}.load ]]; then
|
if [[ -f /etc/apache2/mods-enabled/php${CURRENT_PHP}.load ]]; then
|
||||||
@ -456,10 +469,6 @@ function setup_php() {
|
|||||||
$STD apt-get install -y $MODULE_LIST
|
$STD apt-get install -y $MODULE_LIST
|
||||||
msg_ok "Setup PHP $PHP_VERSION"
|
msg_ok "Setup PHP $PHP_VERSION"
|
||||||
|
|
||||||
if [[ "$PHP_APACHE" == "YES" ]]; then
|
|
||||||
$STD systemctl restart apache2 || true
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$PHP_FPM" == "YES" ]]; then
|
if [[ "$PHP_FPM" == "YES" ]]; then
|
||||||
$STD systemctl enable php${PHP_VERSION}-fpm
|
$STD systemctl enable php${PHP_VERSION}-fpm
|
||||||
$STD systemctl restart php${PHP_VERSION}-fpm
|
$STD systemctl restart php${PHP_VERSION}-fpm
|
||||||
@@ -649,6 +658,15 @@ function setup_mongodb() {
DISTRO_ID=$(awk -F= '/^ID=/{ gsub(/"/,"",$2); print $2 }' /etc/os-release)
DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{ print $2 }' /etc/os-release)

+# Check AVX support
+if ! grep -qm1 'avx[^ ]*' /proc/cpuinfo; then
+  local major="${MONGO_VERSION%%.*}"
+  if ((major > 5)); then
+    msg_error "MongoDB ${MONGO_VERSION} requires AVX support, which is not available on this system."
+    return 1
+  fi
+fi
+
case "$DISTRO_ID" in
ubuntu)
MONGO_BASE_URL="https://repo.mongodb.org/apt/ubuntu"
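The added guard refuses MongoDB releases with a major version above 5 when the CPU visible inside the container lacks AVX. The same probe can be run by hand before choosing a version — it is the same grep the helper uses:

# Quick manual check inside the container (or on the host):
if grep -qm1 'avx' /proc/cpuinfo; then
  echo "AVX available – a MongoDB major version above 5 is an option"
else
  echo "No AVX – stay on MongoDB 5.x or older for this hardware"
fi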
@ -757,6 +775,7 @@ function fetch_and_deploy_gh_release() {
|
|||||||
local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile
|
local mode="${3:-tarball}" # tarball | binary | prebuild | singlefile
|
||||||
local version="${4:-latest}"
|
local version="${4:-latest}"
|
||||||
local target="${5:-/opt/$app}"
|
local target="${5:-/opt/$app}"
|
||||||
|
local asset_pattern="${6:-}"
|
||||||
|
|
||||||
local app_lc=$(echo "${app,,}" | tr -d ' ')
|
local app_lc=$(echo "${app,,}" | tr -d ' ')
|
||||||
local version_file="$HOME/.${app_lc}"
|
local version_file="$HOME/.${app_lc}"
|
||||||
@ -851,9 +870,9 @@ function fetch_and_deploy_gh_release() {
|
|||||||
assets=$(echo "$json" | jq -r '.assets[].browser_download_url')
|
assets=$(echo "$json" | jq -r '.assets[].browser_download_url')
|
||||||
|
|
||||||
# If explicit filename pattern is provided (param $6), match that first
|
# If explicit filename pattern is provided (param $6), match that first
|
||||||
if [[ -n "$6" ]]; then
|
if [[ -n "$asset_pattern" ]]; then
|
||||||
for u in $assets; do
|
for u in $assets; do
|
||||||
[[ "$u" =~ $6 || "$u" == *"$6" ]] && url_match="$u" && break
|
[[ "$u" =~ $asset_pattern || "$u" == *"$asset_pattern" ]] && url_match="$u" && break
|
||||||
done
|
done
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@ -932,13 +951,32 @@ function fetch_and_deploy_gh_release() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
mkdir -p "$target"
|
mkdir -p "$target"
|
||||||
if [[ "$filename" == *.zip ]]; then
|
if [[ "$filename" == *.zip ]]; then
|
||||||
if ! command -v unzip &>/dev/null; then
|
if ! command -v unzip &>/dev/null; then
|
||||||
$STD apt-get install -y unzip
|
$STD apt-get install -y unzip
|
||||||
fi
|
fi
|
||||||
$STD unzip "$tmpdir/$filename" -d "$target"
|
|
||||||
|
local top_level_entries
|
||||||
|
top_level_entries=$(unzip -l "$tmpdir/$filename" | awk '{print $4}' | grep -v '^$' | cut -d/ -f1 | sort -u)
|
||||||
|
|
||||||
|
if [[ $(wc -l <<<"$top_level_entries") -eq 1 ]]; then
|
||||||
|
unzip -q "$tmpdir/$filename" -d "$tmpdir/unzip"
|
||||||
|
shopt -s dotglob nullglob
|
||||||
|
cp -r "$tmpdir/unzip/"* "$target/"
|
||||||
|
shopt -u dotglob nullglob
|
||||||
|
else
|
||||||
|
unzip -q "$tmpdir/$filename" -d "$target"
|
||||||
|
fi
|
||||||
|
|
||||||
elif [[ "$filename" == *.tar.* ]]; then
|
elif [[ "$filename" == *.tar.* ]]; then
|
||||||
tar --strip-components=1 -xf "$tmpdir/$filename" -C "$target"
|
local top_level_entries
|
||||||
|
top_level_entries=$(tar -tf "$tmpdir/$filename" | cut -d/ -f1 | sort -u)
|
||||||
|
|
||||||
|
if [[ $(wc -l <<<"$top_level_entries") -eq 1 ]]; then
|
||||||
|
tar --strip-components=1 -xf "$tmpdir/$filename" -C "$target"
|
||||||
|
else
|
||||||
|
tar -xf "$tmpdir/$filename" -C "$target"
|
||||||
|
fi
|
||||||
else
|
else
|
||||||
msg_error "Unsupported archive format: $filename"
|
msg_error "Unsupported archive format: $filename"
|
||||||
rm -rf "$tmpdir"
|
rm -rf "$tmpdir"
|
||||||
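Both archive branches now inspect the top-level entries first and strip the wrapper directory only when there is exactly one. The same probe works standalone — a short sketch with hypothetical paths:

# Hypothetical paths for illustration; the logic mirrors the tarball branch above.
archive="/tmp/app-release.tar.gz"
target="/opt/app"
top_level=$(tar -tf "$archive" | cut -d/ -f1 | sort -u)
if [[ $(wc -l <<<"$top_level") -eq 1 ]]; then
  tar --strip-components=1 -xf "$archive" -C "$target"   # single wrapper dir: flatten it
else
  tar -xf "$archive" -C "$target"                        # multiple top-level entries: keep layout
fi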
|
@ -162,6 +162,21 @@ update_installation() {
|
|||||||
generate_service >/lib/systemd/system/iptag.service
|
generate_service >/lib/systemd/system/iptag.service
|
||||||
msg_ok "Updated service file"
|
msg_ok "Updated service file"
|
||||||
|
|
||||||
|
msg_info "Creating manual run command"
|
||||||
|
cat <<'EOF' >/usr/local/bin/iptag-run
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
CONFIG_FILE="/opt/iptag/iptag.conf"
|
||||||
|
SCRIPT_FILE="/opt/iptag/iptag"
|
||||||
|
if [[ ! -f "$SCRIPT_FILE" ]]; then
|
||||||
|
echo "❌ Main script not found: $SCRIPT_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
export FORCE_SINGLE_RUN=true
|
||||||
|
exec "$SCRIPT_FILE"
|
||||||
|
EOF
|
||||||
|
chmod +x /usr/local/bin/iptag-run
|
||||||
|
msg_ok "Created iptag-run executable - You can execute this manually by entering “iptag-run” in the Proxmox host, so the script is executed by hand."
|
||||||
|
|
||||||
msg_info "Restarting service"
|
msg_info "Restarting service"
|
||||||
systemctl daemon-reload &>/dev/null
|
systemctl daemon-reload &>/dev/null
|
||||||
systemctl enable -q --now iptag.service &>/dev/null
|
systemctl enable -q --now iptag.service &>/dev/null
|
||||||
@ -194,18 +209,19 @@ LXC_STATUS_CHECK_INTERVAL=300
|
|||||||
FORCE_UPDATE_INTERVAL=7200
|
FORCE_UPDATE_INTERVAL=7200
|
||||||
|
|
||||||
# Performance optimizations
|
# Performance optimizations
|
||||||
VM_IP_CACHE_TTL=120
|
VM_IP_CACHE_TTL=300
|
||||||
MAX_PARALLEL_VM_CHECKS=5
|
MAX_PARALLEL_VM_CHECKS=1
|
||||||
|
|
||||||
# LXC performance optimizations
|
# LXC performance optimizations
|
||||||
LXC_IP_CACHE_TTL=0
|
LXC_IP_CACHE_TTL=300
|
||||||
MAX_PARALLEL_LXC_CHECKS=7
|
MAX_PARALLEL_LXC_CHECKS=2
|
||||||
|
|
||||||
# Extreme LXC optimizations
|
# Extreme LXC optimizations
|
||||||
LXC_BATCH_SIZE=20
|
LXC_BATCH_SIZE=3
|
||||||
LXC_STATUS_CACHE_TTL=30
|
LXC_STATUS_CACHE_TTL=300
|
||||||
LXC_AGGRESSIVE_CACHING=true
|
LXC_AGGRESSIVE_CACHING=true
|
||||||
LXC_SKIP_SLOW_METHODS=true
|
LXC_SKIP_SLOW_METHODS=true
|
||||||
|
LXC_ALLOW_FORCED_COMMANDS=false
|
||||||
|
|
||||||
# Debug settings (set to true to enable debugging)
|
# Debug settings (set to true to enable debugging)
|
||||||
DEBUG=false
|
DEBUG=false
|
||||||
@ -222,8 +238,8 @@ After=network.target
|
|||||||
[Service]
|
[Service]
|
||||||
Type=simple
|
Type=simple
|
||||||
ExecStart=/opt/iptag/iptag
|
ExecStart=/opt/iptag/iptag
|
||||||
Restart=always
|
Restart=on-failure
|
||||||
RestartSec=1
|
RestartSec=10
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
WantedBy=multi-user.target
|
WantedBy=multi-user.target
|
||||||
@ -576,7 +592,9 @@ update_tags() {
|
|||||||
|
|
||||||
if [[ "$type" == "lxc" ]]; then
|
if [[ "$type" == "lxc" ]]; then
|
||||||
current_ips_full=$(get_lxc_ips "${vmid}")
|
current_ips_full=$(get_lxc_ips "${vmid}")
|
||||||
local current_tags_raw=$(pct config "${vmid}" 2>/dev/null | grep tags | awk '{print $2}')
|
while IFS= read -r line; do
|
||||||
|
[[ "$line" == tags:* ]] && current_tags_raw="${line#tags: }" && break
|
||||||
|
done < <(pct config "$vmid" 2>/dev/null)
|
||||||
else
|
else
|
||||||
current_ips_full=$(get_vm_ips "${vmid}")
|
current_ips_full=$(get_vm_ips "${vmid}")
|
||||||
local vm_config="/etc/pve/qemu-server/${vmid}.conf"
|
local vm_config="/etc/pve/qemu-server/${vmid}.conf"
|
||||||
@ -789,7 +807,10 @@ check_status_changed() {
|
|||||||
check() {
|
check() {
|
||||||
local current_time changes_detected=false
|
local current_time changes_detected=false
|
||||||
current_time=$(date +%s)
|
current_time=$(date +%s)
|
||||||
|
|
||||||
|
local update_lxc=false
|
||||||
|
local update_vm=false
|
||||||
|
|
||||||
# Periodic cache cleanup (every 10 minutes)
|
# Periodic cache cleanup (every 10 minutes)
|
||||||
local time_since_last_cleanup=$((current_time - ${last_cleanup_time:-0}))
|
local time_since_last_cleanup=$((current_time - ${last_cleanup_time:-0}))
|
||||||
if [[ $time_since_last_cleanup -ge 600 ]]; then
|
if [[ $time_since_last_cleanup -ge 600 ]]; then
|
||||||
@ -801,60 +822,56 @@ check() {
# Check LXC status
local time_since_last_lxc_check=$((current_time - last_lxc_status_check_time))
if [[ "${LXC_STATUS_CHECK_INTERVAL:-60}" -gt 0 ]] && \
-[[ "${time_since_last_lxc_check}" -ge "${LXC_STATUS_CHECK_INTERVAL:-60}" ]]; then
+[[ "$time_since_last_lxc_check" -ge "${LXC_STATUS_CHECK_INTERVAL:-60}" ]]; then
-last_lxc_status_check_time=${current_time}
+last_lxc_status_check_time=$current_time
if check_status_changed "lxc"; then
-changes_detected=true
+update_lxc=true
-log_warning "LXC status changes detected, updating tags"
+log_warning "LXC status changes detected"
-update_all_tags "lxc"
-last_update_lxc_time=${current_time}
fi
fi

# Check VM status
local time_since_last_vm_check=$((current_time - last_vm_status_check_time))
if [[ "${VM_STATUS_CHECK_INTERVAL:-60}" -gt 0 ]] && \
-[[ "${time_since_last_vm_check}" -ge "${VM_STATUS_CHECK_INTERVAL:-60}" ]]; then
+[[ "$time_since_last_vm_check" -ge "${VM_STATUS_CHECK_INTERVAL:-60}" ]]; then
-last_vm_status_check_time=${current_time}
+last_vm_status_check_time=$current_time
if check_status_changed "vm"; then
-changes_detected=true
+update_vm=true
-log_warning "VM status changes detected, updating tags"
+log_warning "VM status changes detected"
-update_all_tags "vm"
-last_update_vm_time=${current_time}
fi
fi

# Check network interface changes
local time_since_last_fw_check=$((current_time - last_fw_net_interface_check_time))
if [[ "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" -gt 0 ]] && \
-[[ "${time_since_last_fw_check}" -ge "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" ]]; then
+[[ "$time_since_last_fw_check" -ge "${FW_NET_INTERFACE_CHECK_INTERVAL:-60}" ]]; then
-last_fw_net_interface_check_time=${current_time}
+last_fw_net_interface_check_time=$current_time
if check_status_changed "fw"; then
-changes_detected=true
+update_lxc=true
+update_vm=true
-log_warning "Network interface changes detected, updating all tags"
+log_warning "Network interface changes detected"
-update_all_tags "lxc"
-update_all_tags "vm"
-last_update_lxc_time=${current_time}
-last_update_vm_time=${current_time}
fi
fi

-# Force update if needed
+# Force update if interval exceeded
for type in "lxc" "vm"; do
local last_update_var="last_update_${type}_time"
local time_since_last_update=$((current_time - ${!last_update_var}))
-if [[ ${time_since_last_update} -ge ${FORCE_UPDATE_INTERVAL:-1800} ]]; then
+if [[ $time_since_last_update -ge ${FORCE_UPDATE_INTERVAL:-1800} ]]; then
-changes_detected=true
-local minutes=$((${FORCE_UPDATE_INTERVAL:-1800} / 60))
if [[ "$type" == "lxc" ]]; then
-log_info "Scheduled LXC update (every ${minutes} minutes)"
+update_lxc=true
+log_info "Scheduled LXC update (every $((FORCE_UPDATE_INTERVAL / 60)) minutes)"
else
-log_info "Scheduled VM update (every ${minutes} minutes)"
+update_vm=true
+log_info "Scheduled VM update (every $((FORCE_UPDATE_INTERVAL / 60)) minutes)"
fi
-update_all_tags "$type"
eval "${last_update_var}=${current_time}"
fi
done

+# Final execution
+$update_lxc && update_all_tags "lxc"
+$update_vm && update_all_tags "vm"
}

# Initialize time variables
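
Net effect of this hunk: the individual checks only raise the update_lxc / update_vm flags, and update_all_tags runs at most once per type per pass, however many triggers fired. A stripped-down sketch of that pattern, with three stand-in check functions invented for illustration:

#!/usr/bin/env bash
# Sketch: evaluate all triggers first, then do the expensive work once per type.
update_lxc=false
update_vm=false

lxc_changed()   { return 0; }   # stand-in for check_status_changed "lxc"
vm_changed()    { return 1; }   # stand-in for check_status_changed "vm"
iface_changed() { return 0; }   # stand-in for check_status_changed "fw"

lxc_changed   && update_lxc=true
vm_changed    && update_vm=true
iface_changed && { update_lxc=true; update_vm=true; }

$update_lxc && echo "update_all_tags lxc"   # runs once even though two triggers fired
$update_vm  && echo "update_all_tags vm"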
@ -872,9 +889,19 @@ main() {
echo -e "${BLUE}ℹ${NC} Tag format: ${WHITE}${TAG_FORMAT:-$DEFAULT_TAG_FORMAT}${NC}"
echo -e "${BLUE}ℹ${NC} Allowed CIDRs: ${WHITE}${CIDR_LIST[*]}${NC}"
echo -e "${PURPLE}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}\n"
-check
+if [[ "$FORCE_SINGLE_RUN" == "true" ]]; then
+check
+exit 0
+fi

+while true; do
+check
+sleep "${LOOP_INTERVAL:-300}"
+done
}


# Cache cleanup function
cleanup_vm_cache() {
local cache_dir="/tmp"
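
With the loop moved inside main(), the service process keeps itself alive and sleeps LOOP_INTERVAL seconds (300 by default here) between passes, while FORCE_SINGLE_RUN=true turns the same entry point into a one-shot run. Usage, assuming the paths from this diff:

# one-shot pass, e.g. from a shell or a cron job on the PVE host:
FORCE_SINGLE_RUN=true /opt/iptag/iptag

# daemon behaviour (what the systemd unit starts): loop forever
/opt/iptag/iptag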
@ -997,7 +1024,7 @@ process_vms_parallel() {
# Parallel LXC processing function
process_lxc_parallel() {
local lxc_list=("$@")
-local max_parallel=${MAX_PARALLEL_LXC_CHECKS:-7}
+local max_parallel=${MAX_PARALLEL_LXC_CHECKS:-2}
local batch_size=${LXC_BATCH_SIZE:-20}
local job_count=0
local pids=()
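
process_lxc_parallel throttles itself with the pids array and the two config knobs above. The generic shape of that throttling, with a placeholder work function standing in for the real per-container check, looks roughly like this:

#!/usr/bin/env bash
# Sketch: keep at most $max_parallel background jobs in flight at any time.
items=(101 102 103 104 105)
max_parallel=${MAX_PARALLEL_LXC_CHECKS:-2}
pids=()

check_one() { sleep 1; echo "checked CT $1"; }   # placeholder for the real check

for id in "${items[@]}"; do
  check_one "$id" &
  pids+=($!)
  if (( ${#pids[@]} >= max_parallel )); then
    wait "${pids[0]}"            # block on the oldest job to free a slot
    pids=("${pids[@]:1}")
  fi
done
wait                             # reap whatever is still running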
@ -1173,7 +1200,7 @@ get_lxc_ips() {
fi

# Fallback: always do lxc-attach/pct exec with timeout if nothing found
-if [[ -z "$ips" ]]; then
+if [[ -z "$ips" && "${LXC_ALLOW_FORCED_COMMANDS:-true}" == "true" ]]; then
debug_log "lxc $vmid: trying fallback lxc-attach (forced)"
local attach_ip=""
attach_ip=$(timeout 7s lxc-attach -n "$vmid" -- ip -4 addr show 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v '127.0.0.1' | head -1)
@ -1188,7 +1215,7 @@ get_lxc_ips() {
method_used="lxc_attach_forced"
fi
fi
-if [[ -z "$ips" ]]; then
+if [[ -z "$ips" && "${LXC_ALLOW_FORCED_COMMANDS:-true}" == "true" ]]; then
debug_log "lxc $vmid: trying fallback pct exec (forced)"
local pct_ip=""
pct_ip=$(timeout 7s pct exec "$vmid" -- ip -4 addr show 2>/dev/null | grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v '127.0.0.1' | head -1)
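
Both fallbacks reuse the same pipeline: run ip -4 addr show inside the container (bounded by timeout 7s) and keep the first non-loopback IPv4 address. The grep/head part can be exercised on its own against canned output; the sample text below is made up:

#!/usr/bin/env bash
# Fake `ip -4 addr show` output, used only to demonstrate the extraction pipeline.
sample='1: lo    inet 127.0.0.1/8 scope host lo
2: eth0  inet 192.168.1.73/24 brd 192.168.1.255 scope global eth0'

first_ip=$(grep -oE '([0-9]{1,3}\.){3}[0-9]{1,3}' <<<"$sample" | grep -v '127.0.0.1' | head -1)
echo "$first_ip"   # -> 192.168.1.73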
@ -1310,6 +1337,21 @@ msg_info "Restarting Service with optimizations"
systemctl restart iptag.service &>/dev/null
msg_ok "Service restarted with CPU optimizations"

+msg_info "Creating manual run command"
+cat <<'EOF' >/usr/local/bin/iptag-run
+#!/usr/bin/env bash
+CONFIG_FILE="/opt/iptag/iptag.conf"
+SCRIPT_FILE="/opt/iptag/iptag"
+if [[ ! -f "$SCRIPT_FILE" ]]; then
+echo "❌ Main script not found: $SCRIPT_FILE"
+exit 1
+fi
+export FORCE_SINGLE_RUN=true
+exec "$SCRIPT_FILE"
+EOF
+chmod +x /usr/local/bin/iptag-run
+msg_ok "Created iptag-run executable - run 'iptag-run' on the Proxmox host to execute the script manually."

SPINNER_PID=""
echo -e "\n${APP} installation completed successfully! ${CL}\n"
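
Once installed, the wrapper gives operators a quick way to trigger a single tagging pass by hand and to see what the background service has been doing, for example:

iptag-run                            # one pass (FORCE_SINGLE_RUN=true), then exit
journalctl -u iptag.service -n 50    # recent output of the long-running service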