1 Commits

Author SHA1 Message Date
github-actions[bot]
930d28909a Delete split-pro (ct) after migration to ProxmoxVE 2026-03-18 07:44:05 +00:00
61 changed files with 3512 additions and 5082 deletions

764
.github/workflows/pocketbase-bot.yml generated vendored
View File

@@ -1,764 +0,0 @@
name: PocketBase Bot
on:
issue_comment:
types: [created]
permissions:
issues: write
pull-requests: write
contents: read
jobs:
pocketbase-bot:
runs-on: self-hosted
# Only act on /pocketbase commands
if: startsWith(github.event.comment.body, '/pocketbase')
steps:
- name: Execute PocketBase bot command
env:
POCKETBASE_URL: ${{ secrets.POCKETBASE_URL }}
POCKETBASE_COLLECTION: ${{ secrets.POCKETBASE_COLLECTION }}
POCKETBASE_ADMIN_EMAIL: ${{ secrets.POCKETBASE_ADMIN_EMAIL }}
POCKETBASE_ADMIN_PASSWORD: ${{ secrets.POCKETBASE_ADMIN_PASSWORD }}
COMMENT_BODY: ${{ github.event.comment.body }}
COMMENT_ID: ${{ github.event.comment.id }}
ISSUE_NUMBER: ${{ github.event.issue.number }}
REPO_OWNER: ${{ github.repository_owner }}
REPO_NAME: ${{ github.event.repository.name }}
ACTOR: ${{ github.event.comment.user.login }}
ACTOR_ASSOCIATION: ${{ github.event.comment.author_association }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
node << 'ENDSCRIPT'
(async function () {
const https = require('https');
const http = require('http');
const url = require('url');
// ── HTTP helper with redirect following ────────────────────────────
// ── HTTP helper with redirect following ────────────────────────────
// Minimal promise-based HTTP(S) request helper.
//   fullUrl       - absolute URL (http: or https:)
//   opts          - { method?, headers?, body? }; body is a string/Buffer
//   redirectCount - internal recursion depth (callers omit it)
// Resolves with { ok, statusCode, body }; rejects on network errors or
// after more than 5 redirects.
function request(fullUrl, opts, redirectCount) {
  redirectCount = redirectCount || 0;
  return new Promise(function (resolve, reject) {
    // url.parse is legacy API, but it matches the rest of this script's style.
    const u = url.parse(fullUrl);
    const isHttps = u.protocol === 'https:';
    const body = opts.body;
    // Copy the caller's headers: the original mutated opts.headers when
    // adding Content-Length, corrupting header objects reused across calls
    // (and across redirect retries, which re-use the same opts).
    const headers = Object.assign({}, opts.headers);
    if (body) headers['Content-Length'] = Buffer.byteLength(body);
    const options = {
      hostname: u.hostname,
      port: u.port || (isHttps ? 443 : 80),
      path: u.path,
      method: opts.method || 'GET',
      headers: headers
    };
    const lib = isHttps ? https : http;
    const req = lib.request(options, function (res) {
      // Follow 3xx redirects (relative or absolute Location) up to 5 deep.
      if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
        if (redirectCount >= 5) return reject(new Error('Too many redirects from ' + fullUrl));
        const redirectUrl = url.resolve(fullUrl, res.headers.location);
        res.resume(); // drain the redirect body so the socket is released
        resolve(request(redirectUrl, opts, redirectCount + 1));
        return;
      }
      let data = '';
      res.on('data', function (chunk) { data += chunk; });
      res.on('end', function () {
        resolve({ ok: res.statusCode >= 200 && res.statusCode < 300, statusCode: res.statusCode, body: data });
      });
    });
    req.on('error', reject);
    if (body) req.write(body);
    req.end();
  });
}
// ── GitHub API helpers ─────────────────────────────────────────────
const owner = process.env.REPO_OWNER;
const repo = process.env.REPO_NAME;
const issueNumber = parseInt(process.env.ISSUE_NUMBER, 10);
const commentId = parseInt(process.env.COMMENT_ID, 10);
const actor = process.env.ACTOR;
// Thin wrapper around request() for GitHub REST API calls.
//   path   - API path starting with '/', appended to api.github.com
//   method - HTTP verb, defaults to GET
//   body   - optional object; JSON-serialized with a JSON Content-Type
function ghRequest(path, method, body) {
  const payload = body ? JSON.stringify(body) : undefined;
  const headers = {
    'Authorization': 'Bearer ' + process.env.GITHUB_TOKEN,
    'Accept': 'application/vnd.github+json',
    'X-GitHub-Api-Version': '2022-11-28',
    'User-Agent': 'PocketBase-Bot'
  };
  if (payload) {
    headers['Content-Type'] = 'application/json';
  }
  return request('https://api.github.com' + path, {
    method: method || 'GET',
    headers: headers,
    body: payload
  });
}
// Best-effort: add an emoji reaction to the triggering comment.
// Failures are only logged — a missing reaction must never abort the bot.
async function addReaction(content) {
  const endpoint = '/repos/' + owner + '/' + repo + '/issues/comments/' + commentId + '/reactions';
  try {
    await ghRequest(endpoint, 'POST', { content: content });
  } catch (e) {
    console.warn('Could not add reaction:', e.message);
  }
}
// Post a new comment on the triggering issue/PR; logs (does not throw)
// when the GitHub API rejects the request.
async function postComment(text) {
  const endpoint = '/repos/' + owner + '/' + repo + '/issues/' + issueNumber + '/comments';
  const res = await ghRequest(endpoint, 'POST', { body: text });
  if (!res.ok) {
    console.warn('Could not post comment:', res.body);
  }
}
// ── Permission check ───────────────────────────────────────────────
// author_association: OWNER = repo/org owner, MEMBER = org member (includes Contributors team)
const association = process.env.ACTOR_ASSOCIATION;
if (association !== 'OWNER' && association !== 'MEMBER') {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: @' + actor + ' is not authorized to use this command.\n' +
'Only org members (Contributors team) can use `/pocketbase`.'
);
process.exit(0);
}
// ── Acknowledge ────────────────────────────────────────────────────
await addReaction('eyes');
// ── Parse command ──────────────────────────────────────────────────
// Formats (first line of comment):
// /pocketbase <slug> field=value [field=value ...] ← field updates (simple values)
// /pocketbase <slug> set <field> ← value from code block below
// /pocketbase <slug> note list|add|edit|remove ... ← note management
// /pocketbase <slug> method list ← list install methods
// /pocketbase <slug> method <type> cpu=N ram=N hdd=N ← edit install method resources
const commentBody = process.env.COMMENT_BODY || '';
const lines = commentBody.trim().split('\n');
const firstLine = lines[0].trim();
const withoutCmd = firstLine.replace(/^\/pocketbase\s+/, '').trim();
// Extract code block content from comment body (```...``` or ```lang\n...```)
// Pull the contents of the first fenced code block (``` ... ```) out of a
// comment body; the info string after the opening fence is ignored.
// Returns the trimmed inner text, or null when no fenced block exists.
function extractCodeBlock(body) {
  const match = /```[^\n]*\n([\s\S]*?)```/.exec(body);
  if (!match) return null;
  return match[1].trim();
}
const codeBlockValue = extractCodeBlock(commentBody);
const HELP_TEXT =
'**Field update (simple):** `/pocketbase <slug> field=value [field=value ...]`\n\n' +
'**Field update (HTML/multiline) — value from code block:**\n' +
'````\n' +
'/pocketbase <slug> set description\n' +
'```html\n' +
'<p>Your <b>HTML</b> or multi-line content here</p>\n' +
'```\n' +
'````\n\n' +
'**Note management:**\n' +
'```\n' +
'/pocketbase <slug> note list\n' +
'/pocketbase <slug> note add <type> "<text>"\n' +
'/pocketbase <slug> note edit <type> "<old text>" "<new text>"\n' +
'/pocketbase <slug> note remove <type> "<text>"\n' +
'```\n\n' +
'**Install method resources:**\n' +
'```\n' +
'/pocketbase <slug> method list\n' +
'/pocketbase <slug> method <type> hdd=10\n' +
'/pocketbase <slug> method <type> cpu=4 ram=2048 hdd=20\n' +
'```\n\n' +
'**Editable fields:** `name` `description` `logo` `documentation` `website` `project_url` `github` ' +
'`config_path` `port` `default_user` `default_passwd` ' +
'`updateable` `privileged` `has_arm` `is_dev` ' +
'`is_disabled` `disable_message` `is_deleted` `deleted_message`';
if (!withoutCmd) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: No slug or command specified.\n\n' + HELP_TEXT);
process.exit(0);
}
const spaceIdx = withoutCmd.indexOf(' ');
const slug = (spaceIdx === -1 ? withoutCmd : withoutCmd.substring(0, spaceIdx)).trim();
const rest = spaceIdx === -1 ? '' : withoutCmd.substring(spaceIdx + 1).trim();
if (!rest) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: No command specified for slug `' + slug + '`.\n\n' + HELP_TEXT);
process.exit(0);
}
// ── Allowed fields and their types ─────────────────────────────────
// ── PocketBase: authenticate (shared by all paths) ─────────────────
const raw = process.env.POCKETBASE_URL.replace(/\/$/, '');
const apiBase = /\/api$/i.test(raw) ? raw : raw + '/api';
const coll = process.env.POCKETBASE_COLLECTION;
const authRes = await request(apiBase + '/collections/users/auth-with-password', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
identity: process.env.POCKETBASE_ADMIN_EMAIL,
password: process.env.POCKETBASE_ADMIN_PASSWORD
})
});
if (!authRes.ok) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: PocketBase authentication failed. CC @' + owner + '/maintainers');
process.exit(1);
}
const token = JSON.parse(authRes.body).token;
// ── PocketBase: find record by slug (shared by all paths) ──────────
const recordsUrl = apiBase + '/collections/' + encodeURIComponent(coll) + '/records';
const filter = "(slug='" + slug.replace(/'/g, "''") + "')";
const listRes = await request(recordsUrl + '?filter=' + encodeURIComponent(filter) + '&perPage=1', {
headers: { 'Authorization': token }
});
const list = JSON.parse(listRes.body);
const record = list.items && list.items[0];
if (!record) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: No record found for slug `' + slug + '`.\n\n' +
'Make sure the script was already pushed to PocketBase (JSON must exist and have been synced).'
);
process.exit(0);
}
// ── Route: dispatch to subcommand handler ──────────────────────────
// The first word after the slug selects the handler branch below:
//   note <action>  → note management (regex captures the action verb)
//   method ...     → install-method listing / resource edits
//   set <field>    → set a text field from a fenced code block
// Anything else falls through to the generic field=value path.
// NOTE(review): the `if (noteMatch)` branch below is empty, while code
// that dereferences `noteMatch[0]` and an undefined `noteAction` sits
// inside the `set` branch further down — the note-handling code appears
// misplaced (possibly a diff-rendering artifact). Confirm against the
// original workflow before relying on `/pocketbase … note`.
const noteMatch = rest.match(/^note\s+(list|add|edit|remove)\b/i);
const methodMatch = rest.match(/^method\b/i);
const setMatch = rest.match(/^set\s+(\S+)/i);
if (noteMatch) {
// ── NOTE SUBCOMMAND ──────────────────────────────────────────────
} else if (methodMatch) {
// ── METHOD SUBCOMMAND ────────────────────────────────────────────
const methodArgs = rest.replace(/^method\s*/i, '').trim();
const methodListMode = !methodArgs || methodArgs.toLowerCase() === 'list';
// Helper: format bytes/numbers nicely
// Render one install-method record as a Markdown summary line.
// Prefers a nested `resources` object; falls back to the flat
// resources_cpu/resources_ram/resources_hdd fields (defaulting to 0).
// NOTE(review): this helper appears unused — the `method list` branch
// builds its own lines inline; confirm before removing.
function fmtMethod(im) {
  const res = im.resources || {};
  let typeLabel = im.type;
  if (im.expand && im.expand.type) {
    typeLabel = im.expand.type.type || im.expand.type.name || im.expand.type.value || im.type;
  }
  const cpu = res.cpu != null ? res.cpu : im.resources_cpu || 0;
  const ram = res.ram != null ? res.ram : im.resources_ram || 0;
  const hdd = res.hdd != null ? res.hdd : im.resources_hdd || 0;
  return '**`' + (typeLabel || '(unknown type)') + '`** — CPU: `' + cpu + '` · RAM: `' + ram + ' MB` · HDD: `' + hdd + ' GB`';
}
// Fetch expanded install_methods
const expRes = await request(recordsUrl + '/' + record.id + '?expand=install_methods,install_methods.type', { headers: { 'Authorization': token } });
const expRec = JSON.parse(expRes.body);
const installMethods = (expRec.expand && expRec.expand.install_methods) || [];
if (methodListMode) {
await addReaction('+1');
if (installMethods.length === 0) {
await postComment(' **PocketBase Bot**: No install methods found for **`' + slug + '`**.');
} else {
const lines = installMethods.map(function (im, i) {
const r = im.resources || {};
const typeLabel = im.expand && im.expand.type ? (im.expand.type.type || im.expand.type.name || im.expand.type.value || '') : '';
return (i + 1) + '. **`' + (typeLabel || im.type || im.id) + '`** — CPU: `' + (im.resources_cpu || 0) + '` · RAM: `' + (im.resources_ram || 0) + ' MB` · HDD: `' + (im.resources_hdd || 0) + ' GB`';
}).join('\n');
await postComment(' **PocketBase Bot**: Install methods for **`' + slug + '`** (' + installMethods.length + ' total)\n\n' + lines);
}
} else {
// Parse: <type> cpu=N ram=N hdd=N
const methodParts = methodArgs.match(/^(\S+)\s+(.+)$/);
if (!methodParts) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: Invalid `method` syntax.\n\n' +
'**Usage:**\n```\n/pocketbase ' + slug + ' method list\n/pocketbase ' + slug + ' method <type> hdd=10\n/pocketbase ' + slug + ' method <type> cpu=4 ram=2048 hdd=20\n```'
);
process.exit(0);
}
const targetType = methodParts[1].toLowerCase();
const resourcesStr = methodParts[2];
// Parse resource fields (only cpu/ram/hdd allowed)
const RESOURCE_FIELDS = { cpu: true, ram: true, hdd: true };
const resourceChanges = {};
const rePairs = /([a-z]+)=(\d+)/gi;
let m;
while ((m = rePairs.exec(resourcesStr)) !== null) {
const key = m[1].toLowerCase();
if (RESOURCE_FIELDS[key]) resourceChanges[key] = parseInt(m[2], 10);
}
if (Object.keys(resourceChanges).length === 0) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: No valid resource fields found. Use `cpu=N`, `ram=N`, `hdd=N`.');
process.exit(0);
}
// Find matching install method by type name/value
const matchedMethod = installMethods.find(function (im) {
const typeLabel = im.expand && im.expand.type ?
(im.expand.type.type || im.expand.type.name || im.expand.type.value || '') : '';
return typeLabel.toLowerCase() === targetType || (im.type && im.type.toLowerCase && im.type.toLowerCase() === targetType);
});
if (!matchedMethod) {
await addReaction('-1');
const availableTypes = installMethods.map(function (im) {
return im.expand && im.expand.type ? (im.expand.type.type || im.expand.type.name || im.expand.type.value || im.type || im.id) : (im.type || im.id);
});
await postComment(
'❌ **PocketBase Bot**: No install method with type `' + targetType + '` found for `' + slug + '`.\n\n' +
'**Available types:** `' + availableTypes.join('`, `') + '`\n\n' +
'Use `/pocketbase ' + slug + ' method list` to see all methods.'
);
process.exit(0);
}
// Build patch payload for script_install_methods record
const imPatch = {};
if (resourceChanges.cpu != null) imPatch.resources_cpu = resourceChanges.cpu;
if (resourceChanges.ram != null) imPatch.resources_ram = resourceChanges.ram;
if (resourceChanges.hdd != null) imPatch.resources_hdd = resourceChanges.hdd;
const imPatchRes = await request(apiBase + '/collections/script_install_methods/records/' + matchedMethod.id, {
method: 'PATCH',
headers: { 'Authorization': token, 'Content-Type': 'application/json' },
body: JSON.stringify(imPatch)
});
if (!imPatchRes.ok) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: PATCH failed for install method:\n```\n' + imPatchRes.body + '\n```');
process.exit(1);
}
const typeLabel = matchedMethod.expand && matchedMethod.expand.type ?
(matchedMethod.expand.type.type || matchedMethod.expand.type.name || matchedMethod.expand.type.value || targetType) : targetType;
const changesLines = Object.entries(resourceChanges)
.map(function ([k, v]) { return '- `' + k + '` → `' + v + (k === 'ram' ? ' MB' : k === 'hdd' ? ' GB' : '') + '`'; })
.join('\n');
await addReaction('+1');
await postComment(
'✅ **PocketBase Bot**: Updated install method **`' + typeLabel + '`** for **`' + slug + '`**\n\n' +
'**Changes applied:**\n' + changesLines + '\n\n' +
'*Executed by @' + actor + '*'
);
}
} else if (setMatch) {
// ── SET SUBCOMMAND (multi-line / HTML / special chars via code block) ──
const fieldName = setMatch[1].toLowerCase();
const SET_ALLOWED = {
name: 'string', description: 'string', logo: 'string',
documentation: 'string', website: 'string', project_url: 'string', github: 'string',
config_path: 'string', disable_message: 'string', deleted_message: 'string'
};
if (!SET_ALLOWED[fieldName]) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: `set` only supports text fields.\n\n' +
'**Allowed:** `' + Object.keys(SET_ALLOWED).join('`, `') + '`\n\n' +
'For boolean/number fields use `field=value` syntax instead.'
);
process.exit(0);
}
if (!codeBlockValue) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: `set` requires a code block with the value.\n\n' +
'**Usage:**\n````\n/pocketbase ' + slug + ' set ' + fieldName + '\n```\nYour content here (HTML, multiline, special chars all fine)\n```\n````'
);
process.exit(0);
}
const setPayload = {};
setPayload[fieldName] = codeBlockValue;
const setPatchRes = await request(recordsUrl + '/' + record.id, {
method: 'PATCH',
headers: { 'Authorization': token, 'Content-Type': 'application/json' },
body: JSON.stringify(setPayload)
});
if (!setPatchRes.ok) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: PATCH failed for `' + slug + '`:\n```\n' + setPatchRes.body + '\n```');
process.exit(1);
}
const preview = codeBlockValue.length > 300 ? codeBlockValue.substring(0, 300) + '…' : codeBlockValue;
await addReaction('+1');
await postComment(
'✅ **PocketBase Bot**: Set `' + fieldName + '` for **`' + slug + '`**\n\n' +
'**Value set:**\n```\n' + preview + '\n```\n\n' +
'*Executed by @' + actor + '*'
);
// NOTE(review): from here down is note-subcommand setup, yet it sits in
// the `set` branch (after that branch's success path above) and
// dereferences `noteMatch[0]`, which is null whenever this branch runs —
// this code almost certainly belongs under `if (noteMatch)` earlier.
// Also, the `noteAction` variable tested below is never defined anywhere
// in this script. Verify against the pre-deletion workflow file.
const noteArgsStr = rest.substring(noteMatch[0].length).trim();
// Load note types from PocketBase (z_ref_note_types) and build lookup
// maps in both directions: lowercase type name → record id, and
// record id → display name. Failures are logged and leave the maps empty.
const noteTypeToId = {};
const noteTypeToName = {};
try {
const ntRes = await request(apiBase + '/collections/z_ref_note_types/records?perPage=500', { headers: { 'Authorization': token } });
if (ntRes.ok) {
JSON.parse(ntRes.body).items.forEach(function (item) {
if (item.type != null) {
noteTypeToId[item.type.toLowerCase()] = item.id;
noteTypeToName[item.id] = item.type;
}
});
}
} catch (e) { console.warn('z_ref_note_types:', e.message); }
// Type names users may pass on the command line (lowercased).
const VALID_NOTE_TYPES = Object.keys(noteTypeToId);
// Token parser: unquoted-word OR "quoted string" (supports \" escapes)
function parseNoteTokens(str) {
const tokens = [];
let pos = 0;
while (pos < str.length) {
while (pos < str.length && /\s/.test(str[pos])) pos++;
if (pos >= str.length) break;
if (str[pos] === '"') {
pos++;
let start = pos;
while (pos < str.length && str[pos] !== '"') {
if (str[pos] === '\\') pos++;
pos++;
}
tokens.push(str.substring(start, pos).replace(/\\"/g, '"'));
if (pos < str.length) pos++;
} else {
let start = pos;
while (pos < str.length && !/\s/.test(str[pos])) pos++;
tokens.push(str.substring(start, pos));
}
}
return tokens;
}
// Helper: fetch record with expanded notes relation
// Re-fetch the current script record with its `notes` relation expanded.
// Returns { rec, notes } where notes defaults to [] when none are linked.
async function fetchExpandedNotes() {
  const res = await request(recordsUrl + '/' + record.id + '?expand=notes', {
    headers: { 'Authorization': token }
  });
  const rec = JSON.parse(res.body);
  const notes = (rec.expand && rec.expand.notes) || [];
  return { rec: rec, notes: notes };
}
// Helper: format notes list for display
function formatNotesList(notes) {
if (notes.length === 0) return '*None*';
return notes.map(function (n, i) {
return (i + 1) + '. **`' + (noteTypeToName[n.type] || n.type) + '`**: ' + n.text;
}).join('\n');
}
if (noteAction === 'list') {
// ── note list ────────────────────────────────────────────────
const { notes } = await fetchExpandedNotes();
await addReaction('+1');
await postComment(
' **PocketBase Bot**: Notes for **`' + slug + '`** (' + notes.length + ' total)\n\n' +
formatNotesList(notes)
);
} else if (noteAction === 'add') {
// ── note add <type> "<text>" ──────────────────────────────────
const tokens = parseNoteTokens(noteArgsStr);
if (tokens.length < 2) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: `note add` requires `<type>` and `"<text>"`.\n\n' +
'**Usage:** `/pocketbase ' + slug + ' note add <type> "<text>"`\n' +
'**Valid types:** `' + VALID_NOTE_TYPES.join('`, `') + '`'
);
process.exit(0);
}
const noteType = tokens[0].toLowerCase();
const noteText = tokens.slice(1).join(' ');
const typeId = noteTypeToId[noteType];
if (!typeId) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: Unknown note type `' + noteType + '`.\n' +
'**Valid types:** `' + VALID_NOTE_TYPES.join('`, `') + '`'
);
process.exit(0);
}
// POST new note to script_notes
const postNoteRes = await request(apiBase + '/collections/script_notes/records', {
method: 'POST',
headers: { 'Authorization': token, 'Content-Type': 'application/json' },
body: JSON.stringify({ text: noteText, type: typeId, script: record.id })
});
if (!postNoteRes.ok) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: Failed to create note:\n```\n' + postNoteRes.body + '\n```');
process.exit(1);
}
const newNoteId = JSON.parse(postNoteRes.body).id;
// PATCH script to include new note in relation
const existingNoteIds = Array.isArray(record.notes) ? record.notes : [];
const patchLinkRes = await request(recordsUrl + '/' + record.id, {
method: 'PATCH',
headers: { 'Authorization': token, 'Content-Type': 'application/json' },
body: JSON.stringify({ notes: [...existingNoteIds, newNoteId] })
});
if (!patchLinkRes.ok) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: Note created but failed to link to script:\n```\n' + patchLinkRes.body + '\n```');
process.exit(1);
}
await addReaction('+1');
await postComment(
'✅ **PocketBase Bot**: Added note to **`' + slug + '`**\n\n' +
'- **Type:** `' + noteType + '`\n' +
'- **Text:** ' + noteText + '\n\n' +
'*Executed by @' + actor + '*'
);
} else if (noteAction === 'edit') {
// ── note edit <type> "<old text>" "<new text>" ────────────────
const tokens = parseNoteTokens(noteArgsStr);
if (tokens.length < 3) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: `note edit` requires `<type>`, `"<old text>"`, and `"<new text>"`.\n\n' +
'**Usage:** `/pocketbase ' + slug + ' note edit <type> "<old text>" "<new text>"`\n' +
'**Valid types:** `' + VALID_NOTE_TYPES.join('`, `') + '`\n\n' +
'Use `/pocketbase ' + slug + ' note list` to see current notes.'
);
process.exit(0);
}
const noteType = tokens[0].toLowerCase();
const oldText = tokens[1];
const newText = tokens[2];
const typeId = noteTypeToId[noteType];
if (!typeId) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: Unknown note type `' + noteType + '`.\n' +
'**Valid types:** `' + VALID_NOTE_TYPES.join('`, `') + '`'
);
process.exit(0);
}
const { notes } = await fetchExpandedNotes();
const matchingNote = notes.find(function (n) { return n.type === typeId && n.text === oldText; });
if (!matchingNote) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: No `' + noteType + '` note found with that exact text.\n\n' +
'**Current notes for `' + slug + '`:**\n' + formatNotesList(notes)
);
process.exit(0);
}
// PATCH the note record directly
const patchNoteRes = await request(apiBase + '/collections/script_notes/records/' + matchingNote.id, {
method: 'PATCH',
headers: { 'Authorization': token, 'Content-Type': 'application/json' },
body: JSON.stringify({ text: newText })
});
if (!patchNoteRes.ok) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: Failed to update note:\n```\n' + patchNoteRes.body + '\n```');
process.exit(1);
}
await addReaction('+1');
await postComment(
'✅ **PocketBase Bot**: Edited note in **`' + slug + '`**\n\n' +
'- **Type:** `' + noteType + '`\n' +
'- **Old:** ' + oldText + '\n' +
'- **New:** ' + newText + '\n\n' +
'*Executed by @' + actor + '*'
);
} else if (noteAction === 'remove') {
// ── note remove <type> "<text>" ───────────────────────────────
const tokens = parseNoteTokens(noteArgsStr);
if (tokens.length < 2) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: `note remove` requires `<type>` and `"<text>"`.\n\n' +
'**Usage:** `/pocketbase ' + slug + ' note remove <type> "<text>"`\n' +
'**Valid types:** `' + VALID_NOTE_TYPES.join('`, `') + '`\n\n' +
'Use `/pocketbase ' + slug + ' note list` to see current notes.'
);
process.exit(0);
}
const noteType = tokens[0].toLowerCase();
const noteText = tokens[1];
const typeId = noteTypeToId[noteType];
if (!typeId) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: Unknown note type `' + noteType + '`.\n' +
'**Valid types:** `' + VALID_NOTE_TYPES.join('`, `') + '`'
);
process.exit(0);
}
const { rec: expandedRecord, notes } = await fetchExpandedNotes();
const matchingNote = notes.find(function (n) { return n.type === typeId && n.text === noteText; });
if (!matchingNote) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: No `' + noteType + '` note found with that exact text.\n\n' +
'**Current notes for `' + slug + '`:**\n' + formatNotesList(notes)
);
process.exit(0);
}
// PATCH script to remove note ID from relation, then DELETE the note record
const existingNoteIds = Array.isArray(expandedRecord.notes) ? expandedRecord.notes : [];
const patchRes = await request(recordsUrl + '/' + record.id, {
method: 'PATCH',
headers: { 'Authorization': token, 'Content-Type': 'application/json' },
body: JSON.stringify({ notes: existingNoteIds.filter(function (id) { return id !== matchingNote.id; }) })
});
if (!patchRes.ok) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: Failed to unlink note from script:\n```\n' + patchRes.body + '\n```');
process.exit(1);
}
const delRes = await request(apiBase + '/collections/script_notes/records/' + matchingNote.id, {
method: 'DELETE',
headers: { 'Authorization': token }
});
if (!delRes.ok && delRes.statusCode !== 204) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: Note unlinked but could not be deleted:\n```\n' + delRes.body + '\n```');
process.exit(1);
}
await addReaction('+1');
await postComment(
'✅ **PocketBase Bot**: Removed note from **`' + slug + '`**\n\n' +
'- **Type:** `' + noteType + '`\n' +
'- **Text:** ' + noteText + '\n\n' +
'*Executed by @' + actor + '*'
);
}
} else {
// ── FIELD=VALUE PATH ─────────────────────────────────────────────
const fieldsStr = rest;
// Skipped: slug, script_created/updated, created (auto), categories/
// install_methods/notes/type (relations), github_data/install_methods_json/
// notes_json (auto-generated), execute_in (select relation), last_update_commit (auto)
const ALLOWED_FIELDS = {
name: 'string',
description: 'string',
logo: 'string',
documentation: 'string',
website: 'string',
project_url: 'string',
github: 'string',
config_path: 'string',
port: 'number',
default_user: 'nullable_string',
default_passwd: 'nullable_string',
updateable: 'boolean',
privileged: 'boolean',
has_arm: 'boolean',
is_dev: 'boolean',
is_disabled: 'boolean',
disable_message: 'string',
is_deleted: 'boolean',
deleted_message: 'string',
};
// Field=value parser (handles quoted values and empty=null)
// Parse a `key=value key2="quoted value"` string into a plain object.
// Values may be bare words or double-quoted (with \" escapes); `key=`
// with nothing after it yields the empty string (callers map it to null).
// Tokens without '=' are skipped silently; later duplicates win.
function parseFields(str) {
  const result = {};
  let i = 0;
  while (i < str.length) {
    while (i < str.length && /\s/.test(str[i])) i++;
    if (i >= str.length) break;
    const keyBegin = i;
    while (i < str.length && str[i] !== '=' && !/\s/.test(str[i])) i++;
    const key = str.substring(keyBegin, i).trim();
    // bare tokens / empty keys: advance and move on
    if (!key || i >= str.length || str[i] !== '=') {
      i++;
      continue;
    }
    i++; // consume '='
    let value;
    if (str[i] === '"') {
      // quoted value: scan to closing quote, honoring \" escapes
      i++;
      const valBegin = i;
      while (i < str.length && str[i] !== '"') {
        if (str[i] === '\\') i++;
        i++;
      }
      value = str.substring(valBegin, i).replace(/\\"/g, '"');
      if (i < str.length) i++;
    } else {
      const valBegin = i;
      while (i < str.length && !/\s/.test(str[i])) i++;
      value = str.substring(valBegin, i);
    }
    result[key] = value;
  }
  return result;
}
const parsedFields = parseFields(fieldsStr);
const unknownFields = Object.keys(parsedFields).filter(function (f) { return !ALLOWED_FIELDS[f]; });
if (unknownFields.length > 0) {
await addReaction('-1');
await postComment(
'❌ **PocketBase Bot**: Unknown field(s): `' + unknownFields.join('`, `') + '`\n\n' +
'**Allowed fields:** `' + Object.keys(ALLOWED_FIELDS).join('`, `') + '`'
);
process.exit(0);
}
if (Object.keys(parsedFields).length === 0) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: Could not parse any valid `field=value` pairs.\n\n' + HELP_TEXT);
process.exit(0);
}
// Cast values to correct types
const payload = {};
for (const [key, rawVal] of Object.entries(parsedFields)) {
const type = ALLOWED_FIELDS[key];
if (type === 'boolean') {
if (rawVal === 'true') payload[key] = true;
else if (rawVal === 'false') payload[key] = false;
else {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: `' + key + '` must be `true` or `false`, got: `' + rawVal + '`');
process.exit(0);
}
} else if (type === 'number') {
const n = parseInt(rawVal, 10);
if (isNaN(n)) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: `' + key + '` must be a number, got: `' + rawVal + '`');
process.exit(0);
}
payload[key] = n;
} else if (type === 'nullable_string') {
payload[key] = rawVal === '' ? null : rawVal;
} else {
payload[key] = rawVal;
}
}
const patchRes = await request(recordsUrl + '/' + record.id, {
method: 'PATCH',
headers: { 'Authorization': token, 'Content-Type': 'application/json' },
body: JSON.stringify(payload)
});
if (!patchRes.ok) {
await addReaction('-1');
await postComment('❌ **PocketBase Bot**: PATCH failed for `' + slug + '`:\n```\n' + patchRes.body + '\n```');
process.exit(1);
}
await addReaction('+1');
const changesLines = Object.entries(payload)
.map(function ([k, v]) { return '- `' + k + '` → `' + JSON.stringify(v) + '`'; })
.join('\n');
await postComment(
'✅ **PocketBase Bot**: Updated **`' + slug + '`** successfully!\n\n' +
'**Changes applied:**\n' + changesLines + '\n\n' +
'*Executed by @' + actor + '*'
);
}
console.log('Done.');
})().catch(function (e) {
console.error('Fatal error:', e.message || e);
process.exit(1);
});
ENDSCRIPT
shell: bash

View File

@@ -4,7 +4,7 @@ on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
script_slug: script_slug:
description: "Script slug (e.g. my-app)" description: 'Script slug (e.g. my-app)'
required: true required: true
type: string type: string
@@ -49,8 +49,7 @@ jobs:
const https = require('https'); const https = require('https');
const http = require('http'); const http = require('http');
const url = require('url'); const url = require('url');
function request(fullUrl, opts, redirectCount) { function request(fullUrl, opts) {
redirectCount = redirectCount || 0;
return new Promise(function(resolve, reject) { return new Promise(function(resolve, reject) {
const u = url.parse(fullUrl); const u = url.parse(fullUrl);
const isHttps = u.protocol === 'https:'; const isHttps = u.protocol === 'https:';
@@ -65,13 +64,6 @@ jobs:
if (body) options.headers['Content-Length'] = Buffer.byteLength(body); if (body) options.headers['Content-Length'] = Buffer.byteLength(body);
const lib = isHttps ? https : http; const lib = isHttps ? https : http;
const req = lib.request(options, function(res) { const req = lib.request(options, function(res) {
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
if (redirectCount >= 5) return reject(new Error('Too many redirects from ' + fullUrl));
const redirectUrl = url.resolve(fullUrl, res.headers.location);
res.resume();
resolve(request(redirectUrl, opts, redirectCount + 1));
return;
}
let data = ''; let data = '';
res.on('data', function(chunk) { data += chunk; }); res.on('data', function(chunk) { data += chunk; });
res.on('end', function() { res.on('end', function() {
@@ -169,40 +161,11 @@ jobs:
if (!fs.existsSync(file)) continue; if (!fs.existsSync(file)) continue;
const data = JSON.parse(fs.readFileSync(file, 'utf8')); const data = JSON.parse(fs.readFileSync(file, 'utf8'));
if (!data.slug) { console.log('Skipping', file, '(no slug)'); continue; } if (!data.slug) { console.log('Skipping', file, '(no slug)'); continue; }
// execute_in: map type to canonical value
var executeInMap = { ct: 'lxc', lxc: 'lxc', turnkey: 'turnkey', pve: 'pve', addon: 'addon', vm: 'vm' };
var executeIn = data.type ? (executeInMap[data.type.toLowerCase()] || data.type.toLowerCase()) : null;
// github: extract owner/repo from full GitHub URL
var githubField = null;
var projectUrl = data.github || null;
if (data.github) {
var ghMatch = data.github.match(/github\.com\/([^/]+\/[^/?#]+)/);
if (ghMatch) githubField = ghMatch[1].replace(/\.git$/, '');
}
// last_update_commit: last commit touching the actual script files (ct/slug.sh, install/slug-install.sh, vm/slug.sh, etc.)
var lastCommit = null;
try {
var cp = require('child_process');
var scriptFiles = [];
// primary script from install_methods[].script (e.g. "ct/teleport.sh", "vm/teleport.sh")
(data.install_methods || []).forEach(function(im) {
if (im.script) scriptFiles.push(im.script);
});
// derive install script from slug (install/slug-install.sh)
scriptFiles.push('install/' + data.slug + '-install.sh');
// filter to only files that actually exist in git
var existingFiles = scriptFiles.filter(function(f) {
try { cp.execSync('git ls-files --error-unmatch ' + f, { stdio: 'ignore' }); return true; } catch(e) { return false; }
});
if (existingFiles.length > 0) {
lastCommit = cp.execSync('git log -1 --format=%H -- ' + existingFiles.join(' ')).toString().trim() || null;
}
} catch(e) { console.warn('Could not get last commit:', e.message); }
var payload = { var payload = {
name: data.name, name: data.name,
slug: data.slug, slug: data.slug,
script_created: data.date_created || data.script_created, script_created: data.date_created || data.script_created,
script_updated: new Date().toISOString().split('T')[0], script_updated: data.date_created || data.script_updated,
updateable: data.updateable, updateable: data.updateable,
privileged: data.privileged, privileged: data.privileged,
port: data.interface_port != null ? data.interface_port : data.port, port: data.interface_port != null ? data.interface_port : data.port,
@@ -211,16 +174,10 @@ jobs:
logo: data.logo, logo: data.logo,
description: data.description, description: data.description,
config_path: data.config_path, config_path: data.config_path,
default_user: (data.default_credentials && data.default_credentials.username) || data.default_user || null, default_user: (data.default_credentials && data.default_credentials.username) || data.default_user,
default_passwd: (data.default_credentials && data.default_credentials.password) || data.default_passwd || null, default_passwd: (data.default_credentials && data.default_credentials.password) || data.default_passwd,
notes_json: JSON.stringify(data.notes || []),
install_methods_json: JSON.stringify(data.install_methods || []),
is_dev: true is_dev: true
}; };
if (executeIn) payload.execute_in = executeIn;
if (githubField) payload.github = githubField;
if (projectUrl) payload.project_url = projectUrl;
if (lastCommit) payload.last_update_commit = lastCommit;
var resolvedType = typeValueToId[data.type]; var resolvedType = typeValueToId[data.type];
if (resolvedType == null && data.type === 'ct') resolvedType = typeValueToId['lxc']; if (resolvedType == null && data.type === 'ct') resolvedType = typeValueToId['lxc'];
if (resolvedType) payload.type = resolvedType; if (resolvedType) payload.type = resolvedType;

View File

@@ -83,8 +83,7 @@ jobs:
const http = require('http'); const http = require('http');
const url = require('url'); const url = require('url');
function request(fullUrl, opts, redirectCount) { function request(fullUrl, opts) {
redirectCount = redirectCount || 0;
return new Promise(function(resolve, reject) { return new Promise(function(resolve, reject) {
const u = url.parse(fullUrl); const u = url.parse(fullUrl);
const isHttps = u.protocol === 'https:'; const isHttps = u.protocol === 'https:';
@@ -99,13 +98,6 @@ jobs:
if (body) options.headers['Content-Length'] = Buffer.byteLength(body); if (body) options.headers['Content-Length'] = Buffer.byteLength(body);
const lib = isHttps ? https : http; const lib = isHttps ? https : http;
const req = lib.request(options, function(res) { const req = lib.request(options, function(res) {
if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
if (redirectCount >= 5) return reject(new Error('Too many redirects from ' + fullUrl));
const redirectUrl = url.resolve(fullUrl, res.headers.location);
res.resume();
resolve(request(redirectUrl, opts, redirectCount + 1));
return;
}
let data = ''; let data = '';
res.on('data', function(chunk) { data += chunk; }); res.on('data', function(chunk) { data += chunk; });
res.on('end', function() { res.on('end', function() {

View File

@@ -2,17 +2,17 @@
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func) source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: Slaviša Arežina (tremor021) # Author: cobalt (cobaltgit)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://goteleport.com/ # Source: https://ntfy.sh/
APP="Teleport" APP="Alpine-ntfy"
var_tags="${var_tags:-zero-trust}" var_tags="${var_tags:-notification}"
var_cpu="${var_cpu:-1}" var_cpu="${var_cpu:-1}"
var_ram="${var_ram:-1024}" var_ram="${var_ram:-256}"
var_disk="${var_disk:-4}" var_disk="${var_disk:-2}"
var_os="${var_os:-debian}" var_os="${var_os:-alpine}"
var_version="${var_version:-13}" var_version="${var_version:-3.22}"
var_unprivileged="${var_unprivileged:-1}" var_unprivileged="${var_unprivileged:-1}"
header_info "$APP" header_info "$APP"
@@ -24,14 +24,18 @@ function update_script() {
header_info header_info
check_container_storage check_container_storage
check_container_resources check_container_resources
if [[ ! -f /etc/teleport.yaml ]]; then if [[ ! -d /etc/ntfy ]]; then
msg_error "No ${APP} Installation Found!" msg_error "No ${APP} Installation Found!"
exit exit
fi fi
msg_info "Updating ntfy LXC"
$STD apk -U upgrade
setcap 'cap_net_bind_service=+ep' /usr/bin/ntfy
msg_ok "Updated ntfy LXC"
msg_info "Updating Teleport" msg_info "Restarting ntfy"
$STD apt update rc-service ntfy restart
$STD apt upgrade -y msg_ok "Restarted ntfy"
msg_ok "Updated successfully!" msg_ok "Updated successfully!"
exit exit
} }
@@ -43,4 +47,4 @@ description
msg_ok "Completed successfully!\n" msg_ok "Completed successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}" echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}" echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}https://${IP}:3080${CL}" echo -e "${TAB}${GATEWAY}${BGN}http://${IP}${CL}"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 tteck # Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster) | Co-Author: MickLesk (Canbiz) | Co-Author: CrazyWolf13 # Author: tteck (tteckster) | Co-Author: MickLesk (Canbiz) | Co-Author: CrazyWolf13
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://homarr.dev/ # Source: https://homarr.dev/
APP="alpine-homarr" APP="alpine-homarr"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (Canbiz) # Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/community-scripts/ProxmoxVE # Source: https://github.com/community-scripts/ProxmoxVE
APP="Docspell" APP="Docspell"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: Nícolas Pastorello (opastorello) # Author: Nícolas Pastorello (opastorello)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/jumpserver/jumpserver # Source: https://github.com/jumpserver/jumpserver
APP="JumpServer" APP="JumpServer"

View File

@@ -2,7 +2,7 @@
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func) source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2026 tteck # Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster) # Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://petio.tv/ # Source: https://petio.tv/
APP="Petio" APP="Petio"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 tteck # Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster) # Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://nginxproxymanager.com/ # Source: https://nginxproxymanager.com/
APP="Nginx Proxy Manager" APP="Nginx Proxy Manager"

View File

@@ -53,7 +53,7 @@
"type": "info" "type": "info"
}, },
{ {
"text": "**Optional Full-text Search with Apache Tika**: requires your own Tika LXC. See `https://community-scripts.github.io/ProxmoxVED/scripts?id=apache-tika`", "text": "**Optional Full-text Search with Apache Tika**: requires your own Tika LXC. See `https://community-scripts.github.io/ProxmoxVE/scripts?id=apache-tika`",
"type": "info" "type": "info"
}, },
{ {

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ) # Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: # Source:
APP="Roundcubemail" APP="Roundcubemail"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (Canbiz) # Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: # Source:
APP="Squirrel Servers Manager" APP="Squirrel Servers Manager"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: SunFlowerOwl # Author: SunFlowerOwl
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/haugene/docker-transmission-openvpn # Source: https://github.com/haugene/docker-transmission-openvpn
APP="transmission-openvpn" APP="transmission-openvpn"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: Simon Friedrich # Author: Simon Friedrich
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://forgejo.org/ # Source: https://forgejo.org/
APP="Forgejo-Runner" APP="Forgejo-Runner"

View File

@@ -1,101 +0,0 @@
#!/usr/bin/env bash
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/build.func)
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://github.com/danny-avila/LibreChat
# Default container resources; every var_* can be overridden via environment.
APP="LibreChat"
var_tags="${var_tags:-ai;chat}"
var_cpu="${var_cpu:-4}"
var_ram="${var_ram:-6144}"
var_disk="${var_disk:-20}"
var_os="${var_os:-debian}"
var_version="${var_version:-13}"
var_unprivileged="${var_unprivileged:-1}"
header_info "$APP"
variables
color
catch_errors
# Re-run entry point (invoked by build.func helpers when the script is run on
# an existing container): updates LibreChat by git tag and the RAG API by
# GitHub release, each only when a newer upstream version is detected.
function update_script() {
header_info
check_container_storage
check_container_resources
# Refuse to "update" a container that never had LibreChat installed.
if [[ ! -d /opt/librechat ]]; then
msg_error "No ${APP} Installation Found!"
exit
fi
if check_for_gh_tag "librechat" "danny-avila/LibreChat" "v"; then
msg_info "Stopping Services"
systemctl stop librechat rag-api
msg_ok "Stopped Services"
# Preserve the .env outside /opt/librechat before CLEAN_INSTALL replaces the tree.
msg_info "Backing up Configuration"
cp /opt/librechat/.env /opt/librechat.env.bak
msg_ok "Backed up Configuration"
CLEAN_INSTALL=1 fetch_and_deploy_gh_tag "librechat" "danny-avila/LibreChat"
msg_info "Installing Dependencies"
cd /opt/librechat
$STD npm ci
msg_ok "Installed Dependencies"
msg_info "Building Frontend"
$STD npm run frontend
$STD npm prune --production
$STD npm cache clean --force
msg_ok "Built Frontend"
msg_info "Restoring Configuration"
cp /opt/librechat.env.bak /opt/librechat/.env
rm -f /opt/librechat.env.bak
msg_ok "Restored Configuration"
msg_info "Starting Services"
systemctl start rag-api librechat
msg_ok "Started Services"
msg_ok "Updated LibreChat Successfully!"
fi
# The RAG API component is versioned separately and updated independently.
if check_for_gh_release "rag-api" "danny-avila/rag_api"; then
msg_info "Stopping RAG API"
systemctl stop rag-api
msg_ok "Stopped RAG API"
msg_info "Backing up RAG API Configuration"
cp /opt/rag-api/.env /opt/rag-api.env.bak
msg_ok "Backed up RAG API Configuration"
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "rag-api" "danny-avila/rag_api" "tarball"
msg_info "Updating RAG API Dependencies"
cd /opt/rag-api
$STD .venv/bin/pip install -r requirements.lite.txt
msg_ok "Updated RAG API Dependencies"
msg_info "Restoring RAG API Configuration"
cp /opt/rag-api.env.bak /opt/rag-api/.env
rm -f /opt/rag-api.env.bak
msg_ok "Restored RAG API Configuration"
msg_info "Starting RAG API"
systemctl start rag-api
msg_ok "Started RAG API"
msg_ok "Updated RAG API Successfully!"
fi
exit
}
# Standard creation flow: build the container, run the install script inside
# it, then print the access URL.
start
build_container
description
msg_ok "Completed Successfully!\n"
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3080${CL}"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2025 minthcm # Copyright (c) 2021-2025 minthcm
# Author: MintHCM # Author: MintHCM
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/minthcm/minthcm # Source: https://github.com/minthcm/minthcm
APP="MintHCM" APP="MintHCM"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2025 community-scripts ORG # Copyright (c) 2021-2025 community-scripts ORG
# Author: KernelSailor # Author: KernelSailor
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://snowflake.torproject.org/ # Source: https://snowflake.torproject.org/
APP="tor-snowflake" APP="tor-snowflake"

View File

@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: dave-yap (dave-yap) | Co-author: remz1337 # Author: dave-yap (dave-yap) | Co-author: remz1337
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://zitadel.com/ # Source: https://zitadel.com/
APP="Zitadel" APP="Zitadel"

View File

@@ -172,7 +172,6 @@ var_unprivileged="1" # 1=unprivileged (secure), 0=privileged (rarely n
``` ```
**Variable Naming Convention**: **Variable Naming Convention**:
- Variables exposed to user: `var_*` (e.g., `var_cpu`, `var_hostname`, `var_ssh`) - Variables exposed to user: `var_*` (e.g., `var_cpu`, `var_hostname`, `var_ssh`)
- Internal variables: lowercase (e.g., `container_id`, `app_version`) - Internal variables: lowercase (e.g., `container_id`, `app_version`)
@@ -274,7 +273,6 @@ echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
**Triggered by**: Called automatically at script start **Triggered by**: Called automatically at script start
**Behavior**: **Behavior**:
1. Parse command-line arguments (if any) 1. Parse command-line arguments (if any)
2. Generate random UUID for session tracking 2. Generate random UUID for session tracking
3. Load container storage from Proxmox 3. Load container storage from Proxmox
@@ -286,7 +284,6 @@ echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
**Purpose**: Launch the container creation menu with 5 installation modes **Purpose**: Launch the container creation menu with 5 installation modes
**Menu Options**: **Menu Options**:
``` ```
1. Default Installation (Quick setup, predefined settings) 1. Default Installation (Quick setup, predefined settings)
2. Advanced Installation (19-step wizard with full control) 2. Advanced Installation (19-step wizard with full control)
@@ -300,7 +297,6 @@ echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8080${CL}"
**Purpose**: Main orchestrator for LXC container creation **Purpose**: Main orchestrator for LXC container creation
**Operations**: **Operations**:
1. Validates all variables 1. Validates all variables
2. Creates LXC container via `pct create` 2. Creates LXC container via `pct create`
3. Executes `install/AppName-install.sh` inside container 3. Executes `install/AppName-install.sh` inside container
@@ -402,7 +398,6 @@ msg_ok "Completed successfully!\n"
**Symptom**: `pct create` exits with error code 209 **Symptom**: `pct create` exits with error code 209
**Solution**: **Solution**:
```bash ```bash
# Check existing containers # Check existing containers
pct list | grep CTID pct list | grep CTID
@@ -416,7 +411,6 @@ pct destroy CTID
### Update Function Doesn't Detect New Version ### Update Function Doesn't Detect New Version
**Debug**: **Debug**:
```bash ```bash
# Check version file # Check version file
cat /opt/AppName_version.txt cat /opt/AppName_version.txt
@@ -432,7 +426,6 @@ curl -fsSL https://api.github.com/repos/user/repo/releases/latest | grep tag_nam
Before submitting a PR: Before submitting a PR:
### Script Structure ### Script Structure
- [ ] Shebang is `#!/usr/bin/env bash` - [ ] Shebang is `#!/usr/bin/env bash`
- [ ] Imports `build.func` from community-scripts repo - [ ] Imports `build.func` from community-scripts repo
- [ ] Copyright header with author and source URL - [ ] Copyright header with author and source URL
@@ -440,20 +433,17 @@ Before submitting a PR:
- [ ] `var_tags` are semicolon-separated (no spaces) - [ ] `var_tags` are semicolon-separated (no spaces)
### Default Values ### Default Values
- [ ] `var_cpu` set appropriately (2-4 for most apps) - [ ] `var_cpu` set appropriately (2-4 for most apps)
- [ ] `var_ram` set appropriately (1024-4096 MB minimum) - [ ] `var_ram` set appropriately (1024-4096 MB minimum)
- [ ] `var_disk` sufficient for app + data (5-20 GB) - [ ] `var_disk` sufficient for app + data (5-20 GB)
- [ ] `var_os` is realistic - [ ] `var_os` is realistic
### Functions ### Functions
- [ ] `update_script()` implemented - [ ] `update_script()` implemented
- [ ] Update function checks if app installed - [ ] Update function checks if app installed
- [ ] Proper error handling with `msg_error` - [ ] Proper error handling with `msg_error`
### Testing ### Testing
- [ ] Script tested with default installation - [ ] Script tested with default installation
- [ ] Script tested with advanced (19-step) installation - [ ] Script tested with advanced (19-step) installation
- [ ] Update function tested on existing installation - [ ] Update function tested on existing installation

View File

@@ -1,65 +0,0 @@
## 🤖 PocketBase Bot — Command Reference
> Available to **org members only** (Contributors team).
> Trigger by posting a comment on any Issue or PR.
---
### 🔧 Field Updates
Simple key=value pairs. Multiple in one line.
```
/pocketbase <slug> field=value [field=value ...]
```
**Boolean fields** (`true`/`false`): `updateable` `privileged` `has_arm` `is_dev` `is_disabled` `is_deleted`
**Text fields**: `name` `description` `logo` `documentation` `website` `project_url` `github` `config_path` `disable_message` `deleted_message`
**Number**: `port`
**Nullable**: `default_user` `default_passwd` *(empty value = null: `default_passwd=`)*
**Examples:**
```
/pocketbase homeassistant is_disabled=true disable_message="Broken upstream"
/pocketbase homeassistant documentation=https://www.home-assistant.io/docs
/pocketbase homeassistant is_dev=false
/pocketbase homeassistant default_passwd=
```
---
### 📝 set — HTML / Multiline / Special Characters
Use a code block for values that contain HTML, links, quotes or newlines.
````
/pocketbase <slug> set <field>
```
Your content here — HTML tags, links, quotes, all fine
```
````
**Allowed fields:** `name` `description` `logo` `documentation` `website` `project_url` `github` `config_path` `disable_message` `deleted_message`
---
### 🗒️ Notes
```
/pocketbase <slug> note list
/pocketbase <slug> note add <type> "<text>"
/pocketbase <slug> note edit <type> "<old text>" "<new text>"
/pocketbase <slug> note remove <type> "<text>"
```
Note types come from `z_ref_note_types` in PocketBase (e.g. `info`, `warning`).
If text doesn't match exactly, the bot lists all current notes automatically.
---
### ⚙️ Install Method Resources
```
/pocketbase <slug> method list
/pocketbase <slug> method <type> hdd=10
/pocketbase <slug> method <type> cpu=4 ram=2048 hdd=20
```
`<type>` matches the install method type name (e.g. `default`, `alpine`). Use `method list` to see available types and current values. `ram` = MB, `hdd` = GB.
---
### 💡 Tips
- The bot reacts with 👀 when it picks up the command, ✅ on success, and 👎 on error
- On any error, a comment explains what went wrong
- `note edit` / `note remove` show the current note list if the text doesn't match

View File

@@ -0,0 +1,26 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: cobalt (cobaltgit)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://ntfy.sh/
# Runs inside the Alpine container; helper functions are streamed in via
# FUNCTIONS_FILE_PATH by the build framework.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
msg_info "Installing ntfy"
$STD apk add --no-cache ntfy ntfy-openrc libcap
# Comment out the packaged listen-http setting so ntfy falls back to its
# default listener (presumably port 80 — confirm against ntfy server docs).
sed -i '/^listen-http/s/^\(.*\)$/#\1\n/' /etc/ntfy/server.yml
# Allow the unprivileged ntfy binary to bind ports below 1024.
setcap 'cap_net_bind_service=+ep' /usr/bin/ntfy
$STD rc-update add ntfy default
$STD service ntfy start
msg_ok "Installed ntfy"
motd_ssh
customize

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: Nícolas Pastorello (opastorello) # Author: Nícolas Pastorello (opastorello)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/jumpserver/jumpserver # Source: https://github.com/jumpserver/jumpserver
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 tteck # Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster) # Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://petio.tv/ # Source: https://petio.tv/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 tteck # Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster) # Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://nginxproxymanager.com/ # Source: https://nginxproxymanager.com/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: CrazyWolf13 # Author: CrazyWolf13
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/arunavo4/gitea-mirror # Source: https://github.com/arunavo4/gitea-mirror
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (Canbiz) # Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/agersant/polaris # Source: https://github.com/agersant/polaris
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ) # Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color color

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: SunFlowerOwl # Author: SunFlowerOwl
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/haugene/docker-transmission-openvpn # Source: https://github.com/haugene/docker-transmission-openvpn
# Import Functions und Setup # Import Functions und Setup

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 tteck # Copyright (c) 2021-2026 tteck
# Author: tteck (tteckster) # Author: tteck (tteckster)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/dani-garcia/vaultwarden # Source: https://github.com/dani-garcia/vaultwarden
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (Canbiz) # Author: MickLesk (Canbiz)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://vikunja.io/ # Source: https://vikunja.io/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk # Author: MickLesk
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/ente-io/ente # Source: https://github.com/ente-io/ente
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
@@ -29,6 +29,7 @@ NODE_VERSION="24" NODE_MODULE="yarn" setup_nodejs
RUST_CRATES="wasm-pack" setup_rust RUST_CRATES="wasm-pack" setup_rust
$STD rustup target add wasm32-unknown-unknown $STD rustup target add wasm32-unknown-unknown
fetch_and_deploy_gh_release "ente-server" "ente-io/ente" "tarball" "latest" "/opt/ente" fetch_and_deploy_gh_release "ente-server" "ente-io/ente" "tarball" "latest" "/opt/ente"
msg_info "Building Ente CLI" msg_info "Building Ente CLI"

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: Simon Friedrich # Author: Simon Friedrich
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://forgejo.org/ # Source: https://forgejo.org/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -1,139 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://github.com/danny-avila/LibreChat
# Runs inside the container; helper functions arrive via FUNCTIONS_FILE_PATH.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
# Backing services: MongoDB (chat store), Meilisearch (search),
# PostgreSQL + pgvector (RAG embeddings), Node (app), uv/Python (RAG API).
MONGO_VERSION="8.0" setup_mongodb
setup_meilisearch
PG_VERSION="17" PG_MODULES="pgvector" setup_postgresql
PG_DB_NAME="ragapi" PG_DB_USER="ragapi" PG_DB_EXTENSIONS="vector" setup_postgresql_db
NODE_VERSION="22" setup_nodejs
UV_PYTHON="3.12" setup_uv
fetch_and_deploy_gh_tag "librechat" "danny-avila/LibreChat"
fetch_and_deploy_gh_release "rag-api" "danny-avila/rag_api" "tarball"
msg_info "Installing LibreChat Dependencies"
cd /opt/librechat
$STD npm ci
msg_ok "Installed LibreChat Dependencies"
msg_info "Building Frontend"
$STD npm run frontend
$STD npm prune --production
$STD npm cache clean --force
msg_ok "Built Frontend"
msg_info "Installing RAG API Dependencies"
cd /opt/rag-api
$STD uv venv --python 3.12 --seed .venv
$STD .venv/bin/pip install -r requirements.lite.txt
mkdir -p /opt/rag-api/uploads
msg_ok "Installed RAG API Dependencies"
msg_info "Configuring LibreChat"
# Generate the per-install secrets LibreChat requires for JWTs and credential
# encryption (CREDS_IV is 16 bytes, the rest 32).
JWT_SECRET=$(openssl rand -hex 32)
JWT_REFRESH_SECRET=$(openssl rand -hex 32)
CREDS_KEY=$(openssl rand -hex 32)
CREDS_IV=$(openssl rand -hex 16)
cat <<EOF >/opt/librechat/.env
HOST=0.0.0.0
PORT=3080
MONGO_URI=mongodb://127.0.0.1:27017/LibreChat
DOMAIN_CLIENT=http://${LOCAL_IP}:3080
DOMAIN_SERVER=http://${LOCAL_IP}:3080
NO_INDEX=true
TRUST_PROXY=1
JWT_SECRET=${JWT_SECRET}
JWT_REFRESH_SECRET=${JWT_REFRESH_SECRET}
SESSION_EXPIRY=1000 * 60 * 15
REFRESH_TOKEN_EXPIRY=(1000 * 60 * 60 * 24) * 7
CREDS_KEY=${CREDS_KEY}
CREDS_IV=${CREDS_IV}
ALLOW_EMAIL_LOGIN=true
ALLOW_REGISTRATION=true
ALLOW_SOCIAL_LOGIN=false
ALLOW_SOCIAL_REGISTRATION=false
ALLOW_PASSWORD_RESET=false
ALLOW_UNVERIFIED_EMAIL_LOGIN=true
SEARCH=true
MEILI_NO_ANALYTICS=true
MEILI_HOST=http://127.0.0.1:7700
MEILI_MASTER_KEY=${MEILISEARCH_MASTER_KEY}
RAG_PORT=8000
RAG_API_URL=http://127.0.0.1:8000
APP_TITLE=LibreChat
ENDPOINTS=openAI,agents,assistants,anthropic,google
# OPENAI_API_KEY=your-key-here
# OPENAI_MODELS=
# ANTHROPIC_API_KEY=your-key-here
# GOOGLE_KEY=your-key-here
EOF
msg_ok "Configured LibreChat"
msg_info "Configuring RAG API"
# PG_DB_* and PG_DB_PASS are exported by setup_postgresql_db above.
cat <<EOF >/opt/rag-api/.env
VECTOR_DB_TYPE=pgvector
DB_HOST=127.0.0.1
DB_PORT=5432
POSTGRES_DB=${PG_DB_NAME}
POSTGRES_USER=${PG_DB_USER}
POSTGRES_PASSWORD=${PG_DB_PASS}
RAG_HOST=0.0.0.0
RAG_PORT=8000
JWT_SECRET=${JWT_SECRET}
RAG_UPLOAD_DIR=/opt/rag-api/uploads/
EOF
msg_ok "Configured RAG API"
msg_info "Creating Services"
cat <<EOF >/etc/systemd/system/librechat.service
[Unit]
Description=LibreChat
After=network.target mongod.service meilisearch.service rag-api.service
[Service]
Type=simple
User=root
WorkingDirectory=/opt/librechat
EnvironmentFile=/opt/librechat/.env
ExecStart=/usr/bin/npm run backend
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
cat <<EOF >/etc/systemd/system/rag-api.service
[Unit]
Description=LibreChat RAG API
After=network.target postgresql.service
[Service]
Type=simple
User=root
WorkingDirectory=/opt/rag-api
EnvironmentFile=/opt/rag-api/.env
ExecStart=/opt/rag-api/.venv/bin/uvicorn main:app --host 0.0.0.0 --port 8000
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
systemctl enable -q --now rag-api librechat
msg_ok "Created Services"
motd_ssh
customize
cleanup_lxc

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2025 minthcm # Copyright (c) 2021-2025 minthcm
# Author: MintHCM # Author: MintHCM
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://github.com/minthcm/minthcm # Source: https://github.com/minthcm/minthcm
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
@@ -17,7 +17,7 @@ PHP_VERSION="8.2"
PHP_APACHE="YES" PHP_MODULE="mysql,redis" PHP_FPM="YES" setup_php PHP_APACHE="YES" PHP_MODULE="mysql,redis" PHP_FPM="YES" setup_php
setup_composer setup_composer
setup_mariadb setup_mariadb
$STD mariadb -u root -e "SET GLOBAL sql_mode='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION'" $STD mariadb -u root -e "SET GLOBAL sql_mode='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION'";
fetch_and_deploy_gh_release "MintHCM" "minthcm/minthcm" "tarball" "latest" "/var/www/MintHCM" fetch_and_deploy_gh_release "MintHCM" "minthcm/minthcm" "tarball" "latest" "/var/www/MintHCM"

View File

@@ -1,34 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG
# Author: Slaviša Arežina (tremor021)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE
# Source: https://goteleport.com/

# Installs Teleport inside an LXC container using the community-scripts framework.
# All helpers below (color, setup_deb822_repo, msg_info, $STD, LOCAL_IP, ...) come
# from the sourced FUNCTIONS_FILE_PATH environment.
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
color
verb_ip6
catch_errors
setting_up_container
network_check
update_os
# Register the official Teleport APT repository (deb822 format) with its signing key.
# NOTE(review): suite "trixie" and channel "stable/v18" are hard-coded — confirm they
# match the container OS version and the desired Teleport major release.
setup_deb822_repo \
  "teleport" \
  "https://deb.releases.teleport.dev/teleport-pubkey.asc" \
  "https://apt.releases.teleport.dev/debian" \
  "trixie" \
  "stable/v18"
msg_info "Configuring Teleport"
$STD apt install -y teleport
# Generate a default configuration file (-o: output path).
$STD teleport configure -o /etc/teleport.yaml
systemctl enable -q --now teleport
# Presumably gives the daemon time to come up before tctl talks to it — TODO confirm;
# a readiness poll would be more robust than a fixed sleep.
sleep 5
# Create the initial admin user; the one-time signup URL is saved for the operator.
tctl users add teleport-admin --roles=editor,access --logins=root >~/teleportadmin.creds
# Rewrite the advertised URL in the creds file to use the container's IP on port 3080.
sed -i "s|https://[^:]*:3080|https://${LOCAL_IP}:3080|g" ~/teleportadmin.creds
msg_ok "Configured Teleport"
motd_ssh
customize
cleanup_lxc

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2025 community-scripts ORG # Copyright (c) 2021-2025 community-scripts ORG
# Author: KernelSailor # Author: KernelSailor
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://snowflake.torproject.org/ # Source: https://snowflake.torproject.org/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: dave-yap (dave-yap) | Co-Author: remz1337 # Author: dave-yap (dave-yap) | Co-Author: remz1337
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# Source: https://zitadel.com/ # Source: https://zitadel.com/
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH" source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"

View File

@@ -1,52 +0,0 @@
{
"name": "LibreChat",
"slug": "librechat",
"categories": [
20
],
"date_created": "2026-03-18",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 3080,
"documentation": "https://www.librechat.ai/docs",
"website": "https://www.librechat.ai/",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/librechat.webp",
"config_path": "/opt/librechat/.env",
"description": "LibreChat is an open-source AI chat platform that supports multiple AI providers including OpenAI, Anthropic, Google, and more. It features conversation history, multi-modal support, custom endpoints, and a plugin system.",
"install_methods": [
{
"type": "default",
"script": "ct/librechat.sh",
"resources": {
"cpu": 4,
"ram": 6144,
"hdd": 20,
"os": "Debian",
"version": "13"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": [
{
"text": "Register the first account via the web interface — it becomes the admin account.",
"type": "info"
},
{
"text": "Add your AI provider API keys to /opt/librechat/.env (OPENAI_API_KEY, ANTHROPIC_API_KEY, etc.) and restart librechat. OpenAI, Anthropic, Google and Agents endpoints are pre-enabled via ENDPOINTS.",
"type": "info"
},
{
"text": "RAG API is included and running on port 8000. Set RAG_OPENAI_API_KEY in /opt/rag-api/.env to enable document Q&A.",
"type": "info"
},
{
"text": "For local embeddings without an API key, set EMBEDDINGS_PROVIDER=ollama and OLLAMA_BASE_URL=http://<ollama-host>:11434 in /opt/rag-api/.env and restart rag-api.",
"type": "info"
}
]
}

View File

@@ -1,41 +0,0 @@
{
"name": "Teleport",
"slug": "teleport",
"categories": [
6
],
"date_created": "2026-03-18",
"type": "ct",
"updateable": true,
"privileged": false,
"interface_port": 3080,
"documentation": "https://goteleport.com/docs/",
"config_path": "/etc/teleport.yaml",
"website": "https://goteleport.com/",
"github": "https://github.com/gravitational/teleport",
"logo": "https://cdn.jsdelivr.net/gh/selfhst/icons@main/webp/teleport.webp",
"description": "Teleport unifies identities — humans, machines, and AI — with strong identity implementation to speed up engineering, improve resiliency against identity-based attacks, and control AI in production infrastructure.",
"install_methods": [
{
"type": "default",
"script": "ct/teleport.sh",
"resources": {
"cpu": 1,
"ram": 1024,
"hdd": 4,
"os": "Debian",
"version": "13"
}
}
],
"default_credentials": {
"username": null,
"password": null
},
"notes": [
{
"text": "After installation finishes, run `cat ~/teleportadmin.creds` inside the LXC to get the admin account creation URL.",
"type": "info"
}
]
}

View File

@@ -1,12 +1,12 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: tteck (tteckster) # Author: tteck (tteckster)
# Co-Author: MickLesk # Co-Author: MickLesk
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
if ! command -v curl >/dev/null 2>&1; then if ! command -v curl >/dev/null 2>&1; then
apk update && apk add curl >/dev/null 2>&1 apk update && apk add curl >/dev/null 2>&1
fi fi
COMMUNITY_SCRIPTS_URL="${COMMUNITY_SCRIPTS_URL:-https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main}" COMMUNITY_SCRIPTS_URL="${COMMUNITY_SCRIPTS_URL:-https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main}"
source <(curl -fsSL "$COMMUNITY_SCRIPTS_URL/misc/core.func") source <(curl -fsSL "$COMMUNITY_SCRIPTS_URL/misc/core.func")
source <(curl -fsSL "$COMMUNITY_SCRIPTS_URL/misc/error_handler.func") source <(curl -fsSL "$COMMUNITY_SCRIPTS_URL/misc/error_handler.func")
load_functions load_functions

View File

@@ -1,6 +1,6 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: michelroegl-brunner | MickLesk # Author: michelroegl-brunner | MickLesk
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/LICENSE # License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/LICENSE
# ============================================================================== # ==============================================================================
# API.FUNC - TELEMETRY & DIAGNOSTICS API # API.FUNC - TELEMETRY & DIAGNOSTICS API

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: tteck (tteckster) | MickLesk | michelroegl-brunner # Author: tteck (tteckster) | MickLesk | michelroegl-brunner
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/LICENSE # License: MIT | https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/LICENSE
# ============================================================================== # ==============================================================================
# BUILD.FUNC - LXC CONTAINER BUILD & CONFIGURATION # BUILD.FUNC - LXC CONTAINER BUILD & CONFIGURATION
@@ -85,7 +85,7 @@ variables() {
# Configurable base URL for development — override with COMMUNITY_SCRIPTS_URL # Configurable base URL for development — override with COMMUNITY_SCRIPTS_URL
# See docs/DEV_MODE.md for details # See docs/DEV_MODE.md for details
COMMUNITY_SCRIPTS_URL="${COMMUNITY_SCRIPTS_URL:-https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main}" COMMUNITY_SCRIPTS_URL="${COMMUNITY_SCRIPTS_URL:-https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main}"
source <(curl -fsSL "$COMMUNITY_SCRIPTS_URL/misc/api.func") source <(curl -fsSL "$COMMUNITY_SCRIPTS_URL/misc/api.func")
@@ -631,7 +631,7 @@ run_preflight() {
done done
echo "" echo ""
echo -e "${INFO} Please resolve the above issues before creating a container." echo -e "${INFO} Please resolve the above issues before creating a container."
echo -e "${INFO} Documentation: ${BL}https://community-scripts.github.io/ProxmoxVED/${CL}" echo -e "${INFO} Documentation: ${BL}https://community-scripts.github.io/ProxmoxVE/${CL}"
# Report to telemetry (if consent was given) # Report to telemetry (if consent was given)
post_preflight_to_api post_preflight_to_api
@@ -3294,7 +3294,7 @@ DIAGNOSTICS=yes
#This file is used to store the diagnostics settings for the Community-Scripts API. #This file is used to store the diagnostics settings for the Community-Scripts API.
#https://git.community-scripts.org/community-scripts/ProxmoxVED/discussions/1836 #https://git.community-scripts.org/community-scripts/ProxmoxVED/discussions/1836
#Your diagnostics will be sent to the Community-Scripts API for troubleshooting/statistical purposes. #Your diagnostics will be sent to the Community-Scripts API for troubleshooting/statistical purposes.
#You can review the data at https://community-scripts.github.io/ProxmoxVED/data #You can review the data at https://community-scripts.github.io/ProxmoxVE/data
#If you do not wish to send diagnostics, please set the variable 'DIAGNOSTICS' to "no" in /usr/local/community-scripts/diagnostics, or use the menue. #If you do not wish to send diagnostics, please set the variable 'DIAGNOSTICS' to "no" in /usr/local/community-scripts/diagnostics, or use the menue.
#This will disable the diagnostics feature. #This will disable the diagnostics feature.
#To send diagnostics, set the variable 'DIAGNOSTICS' to "yes" in /usr/local/community-scripts/diagnostics, or use the menue. #To send diagnostics, set the variable 'DIAGNOSTICS' to "yes" in /usr/local/community-scripts/diagnostics, or use the menue.
@@ -3319,7 +3319,7 @@ DIAGNOSTICS=no
#This file is used to store the diagnostics settings for the Community-Scripts API. #This file is used to store the diagnostics settings for the Community-Scripts API.
#https://git.community-scripts.org/community-scripts/ProxmoxVED/discussions/1836 #https://git.community-scripts.org/community-scripts/ProxmoxVED/discussions/1836
#Your diagnostics will be sent to the Community-Scripts API for troubleshooting/statistical purposes. #Your diagnostics will be sent to the Community-Scripts API for troubleshooting/statistical purposes.
#You can review the data at https://community-scripts.github.io/ProxmoxVED/data #You can review the data at https://community-scripts.github.io/ProxmoxVE/data
#If you do not wish to send diagnostics, please set the variable 'DIAGNOSTICS' to "no" in /usr/local/community-scripts/diagnostics, or use the menue. #If you do not wish to send diagnostics, please set the variable 'DIAGNOSTICS' to "no" in /usr/local/community-scripts/diagnostics, or use the menue.
#This will disable the diagnostics feature. #This will disable the diagnostics feature.
#To send diagnostics, set the variable 'DIAGNOSTICS' to "yes" in /usr/local/community-scripts/diagnostics, or use the menue. #To send diagnostics, set the variable 'DIAGNOSTICS' to "yes" in /usr/local/community-scripts/diagnostics, or use the menue.
@@ -4935,7 +4935,7 @@ EOF'
set +Eeuo pipefail set +Eeuo pipefail
trap - ERR trap - ERR
local _LXC_CAPTURE_LOG="/tmp/.install-capture-${SESSION_ID}.log" local _LXC_CAPTURE_LOG="/tmp/.install-capture-${SESSION_ID}.log"
lxc-attach -n "$CTID" -- bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/${var_install}.sh)" 2>&1 | tee "$_LXC_CAPTURE_LOG" lxc-attach -n "$CTID" -- bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/install/${var_install}.sh)" 2>&1 | tee "$_LXC_CAPTURE_LOG"
local apt_retry_exit=${PIPESTATUS[0]} local apt_retry_exit=${PIPESTATUS[0]}
set -Eeuo pipefail set -Eeuo pipefail
trap 'error_handler' ERR trap 'error_handler' ERR
@@ -6006,7 +6006,7 @@ description() {
cat <<EOF cat <<EOF
<div align='center'> <div align='center'>
<a href='https://Helper-Scripts.com' target='_blank' rel='noopener noreferrer'> <a href='https://Helper-Scripts.com' target='_blank' rel='noopener noreferrer'>
<img src='https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/images/logo-81x112.png' alt='Logo' style='width:81px;height:112px;'/> <img src='https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/misc/images/logo-81x112.png' alt='Logo' style='width:81px;height:112px;'/>
</a> </a>
<h2 style='font-size: 24px; margin: 20px 0;'>${APP} LXC</h2> <h2 style='font-size: 24px; margin: 20px 0;'>${APP} LXC</h2>

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: community-scripts ORG # Author: community-scripts ORG
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/branch/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/branch/main/LICENSE
# Revision: 1 # Revision: 1
# ============================================================================== # ==============================================================================
@@ -17,7 +17,7 @@
# - Cloud-Init status monitoring and waiting # - Cloud-Init status monitoring and waiting
# #
# Usage: # Usage:
# source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/cloud-init.func) # source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/cloud-init.func)
# setup_cloud_init "$VMID" "$STORAGE" "$HN" "yes" # setup_cloud_init "$VMID" "$STORAGE" "$HN" "yes"
# #
# Compatible with: Debian, Ubuntu, and all Cloud-Init enabled distributions # Compatible with: Debian, Ubuntu, and all Cloud-Init enabled distributions

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/LICENSE # License: MIT | https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/LICENSE
# ============================================================================== # ==============================================================================
# CORE FUNCTIONS - LXC CONTAINER UTILITIES # CORE FUNCTIONS - LXC CONTAINER UTILITIES
@@ -491,6 +491,7 @@ log_section() {
# - Executes command with output redirected to active log file # - Executes command with output redirected to active log file
# - On error: displays last 20 lines of log and exits with original exit code # - On error: displays last 20 lines of log and exits with original exit code
# - Temporarily disables error trap to capture exit code correctly # - Temporarily disables error trap to capture exit code correctly
# - Sources explain_exit_code() for detailed error messages # - Sources explain_exit_code() for detailed error messages
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
@@ -521,7 +522,7 @@ silent() {
if [[ $rc -ne 0 ]]; then if [[ $rc -ne 0 ]]; then
# Source explain_exit_code if needed # Source explain_exit_code if needed
if ! declare -f explain_exit_code >/dev/null 2>&1; then if ! declare -f explain_exit_code >/dev/null 2>&1; then
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/misc/error_handler.func) source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/error_handler.func)
fi fi
local explanation local explanation
@@ -790,7 +791,7 @@ exit_script() {
get_header() { get_header() {
local app_name=$(echo "${APP,,}" | tr -d ' ') local app_name=$(echo "${APP,,}" | tr -d ' ')
local app_type=${APP_TYPE:-ct} # Default to 'ct' if not set local app_type=${APP_TYPE:-ct} # Default to 'ct' if not set
local header_url="https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/${app_type}/headers/${app_name}" local header_url="https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/${app_type}/headers/${app_name}"
local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}" local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}"
mkdir -p "$(dirname "$local_header_path")" mkdir -p "$(dirname "$local_header_path")"

View File

@@ -4,7 +4,7 @@
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ) # Author: MickLesk (CanbiZ)
# License: MIT | https://github.com/community-scripts/ProxmoxVED/raw/main/LICENSE # License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# #
# Provides comprehensive error handling and signal management for all scripts. # Provides comprehensive error handling and signal management for all scripts.

View File

@@ -173,7 +173,7 @@ _bootstrap() {
fi fi
# Configurable base URL for development — override with COMMUNITY_SCRIPTS_URL # Configurable base URL for development — override with COMMUNITY_SCRIPTS_URL
COMMUNITY_SCRIPTS_URL="${COMMUNITY_SCRIPTS_URL:-https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main}" COMMUNITY_SCRIPTS_URL="${COMMUNITY_SCRIPTS_URL:-https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main}"
# Source core functions # Source core functions
source <(curl -fsSL "$COMMUNITY_SCRIPTS_URL/misc/core.func") source <(curl -fsSL "$COMMUNITY_SCRIPTS_URL/misc/core.func")
@@ -944,7 +944,7 @@ EOF
# Create update script # Create update script
# Use var_os for OS-based containers, otherwise use app name # Use var_os for OS-based containers, otherwise use app name
local update_script_name="${var_os:-$app}" local update_script_name="${var_os:-$app}"
echo "bash -c \"\$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/ct/${update_script_name}.sh)\"" >/usr/bin/update echo "bash -c \"\$(curl -fsSL https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/ct/${update_script_name}.sh)\"" >/usr/bin/update
chmod +x /usr/bin/update chmod +x /usr/bin/update
# Inject SSH authorized keys if provided # Inject SSH authorized keys if provided

View File

@@ -105,13 +105,11 @@ curl_with_retry() {
fi fi
fi fi
debug_log "curl attempt $attempt failed (timeout=${timeout}s), waiting ${backoff}s before retry..." debug_log "curl attempt $attempt failed, waiting ${backoff}s before retry..."
sleep "$backoff" sleep "$backoff"
# Exponential backoff: 1, 2, 4, 8... capped at 30s # Exponential backoff: 1, 2, 4, 8... capped at 30s
backoff=$((backoff * 2)) backoff=$((backoff * 2))
((backoff > 30)) && backoff=30 ((backoff > 30)) && backoff=30
# Double --max-time on each retry so slow connections can finish
timeout=$((timeout * 2))
((attempt++)) ((attempt++))
done done
@@ -174,10 +172,8 @@ curl_api_with_retry() {
return 0 return 0
fi fi
debug_log "curl API attempt $attempt failed (HTTP $http_code, timeout=${timeout}s), waiting ${attempt}s..." debug_log "curl API attempt $attempt failed (HTTP $http_code), waiting ${attempt}s..."
sleep "$attempt" sleep "$attempt"
# Double --max-time on each retry so slow connections can finish
timeout=$((timeout * 2))
((attempt++)) ((attempt++))
done done
@@ -938,11 +934,7 @@ upgrade_package() {
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Repository availability check with caching # Repository availability check with caching
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Note: Must use -gA (global) because tools.func is sourced inside update_os() declare -A _REPO_CACHE 2>/dev/null || true
# function scope. Plain 'declare -A' would create a local variable that gets
# destroyed when update_os() returns, causing "unbound variable" errors later
# when setup_postgresql/verify_repo_available tries to access the cache key.
declare -gA _REPO_CACHE 2>/dev/null || declare -A _REPO_CACHE 2>/dev/null || true
verify_repo_available() { verify_repo_available() {
local repo_url="$1" local repo_url="$1"
@@ -973,43 +965,13 @@ verify_repo_available() {
} }
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Ensure dependencies are installed (with apt/apk update caching) # Ensure dependencies are installed (with apt update caching)
# Supports both Debian (apt/dpkg) and Alpine (apk) systems
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
ensure_dependencies() { ensure_dependencies() {
local deps=("$@") local deps=("$@")
local missing=() local missing=()
# Detect Alpine Linux # Fast batch check using dpkg-query (much faster than individual checks)
if [[ -f /etc/alpine-release ]]; then
for dep in "${deps[@]}"; do
if command -v "$dep" &>/dev/null; then
continue
fi
if apk info -e "$dep" &>/dev/null; then
continue
fi
missing+=("$dep")
done
if [[ ${#missing[@]} -gt 0 ]]; then
$STD apk add --no-cache "${missing[@]}" || {
local failed=()
for pkg in "${missing[@]}"; do
if ! $STD apk add --no-cache "$pkg" 2>/dev/null; then
failed+=("$pkg")
fi
done
if [[ ${#failed[@]} -gt 0 ]]; then
msg_error "Failed to install dependencies: ${failed[*]}"
return 1
fi
}
fi
return 0
fi
# Debian/Ubuntu: Fast batch check using dpkg-query
local installed_pkgs local installed_pkgs
installed_pkgs=$(dpkg-query -W -f='${Package}\n' 2>/dev/null | sort -u) installed_pkgs=$(dpkg-query -W -f='${Package}\n' 2>/dev/null | sort -u)
@@ -1106,53 +1068,11 @@ create_temp_dir() {
} }
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Check if package is installed (supports both Debian and Alpine) # Check if package is installed (faster than dpkg -l | grep)
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
is_package_installed() { is_package_installed() {
local package="$1" local package="$1"
if [[ -f /etc/alpine-release ]]; then
apk info -e "$package" &>/dev/null
else
dpkg-query -W -f='${Status}' "$package" 2>/dev/null | grep -q "^install ok installed$" dpkg-query -W -f='${Status}' "$package" 2>/dev/null | grep -q "^install ok installed$"
fi
}
# ------------------------------------------------------------------------------
# Prompt user to enter a GitHub Personal Access Token (PAT) interactively
# Returns 0 if a valid token was provided, 1 otherwise
# ------------------------------------------------------------------------------
prompt_for_github_token() {
if [[ ! -t 0 ]]; then
return 1
fi
local reply
read -rp "${TAB}Would you like to enter a GitHub Personal Access Token (PAT)? [y/N]: " reply
reply="${reply:-n}"
if [[ ! "${reply,,}" =~ ^(y|yes)$ ]]; then
return 1
fi
local token
while true; do
read -rp "${TAB}Enter your GitHub PAT: " token
# Trim leading/trailing whitespace
token="$(echo "$token" | xargs)"
if [[ -z "$token" ]]; then
msg_warn "Token cannot be empty. Please try again."
continue
fi
if [[ "$token" =~ [[:space:]] ]]; then
msg_warn "Token must not contain spaces. Please try again."
continue
fi
break
done
export GITHUB_TOKEN="$token"
msg_ok "GitHub token has been set."
return 0
} }
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
@@ -1167,8 +1087,7 @@ github_api_call() {
local header_args=() local header_args=()
[[ -n "${GITHUB_TOKEN:-}" ]] && header_args=(-H "Authorization: Bearer $GITHUB_TOKEN") [[ -n "${GITHUB_TOKEN:-}" ]] && header_args=(-H "Authorization: Bearer $GITHUB_TOKEN")
local attempt=1 for attempt in $(seq 1 $max_retries); do
while ((attempt <= max_retries)); do
local http_code local http_code
http_code=$(curl -sSL -w "%{http_code}" -o "$output_file" \ http_code=$(curl -sSL -w "%{http_code}" -o "$output_file" \
-H "Accept: application/vnd.github+json" \ -H "Accept: application/vnd.github+json" \
@@ -1185,11 +1104,7 @@ github_api_call() {
if [[ -n "${GITHUB_TOKEN:-}" ]]; then if [[ -n "${GITHUB_TOKEN:-}" ]]; then
msg_error "Your GITHUB_TOKEN appears to be invalid or expired." msg_error "Your GITHUB_TOKEN appears to be invalid or expired."
else else
msg_error "The repository may require authentication." msg_error "The repository may require authentication. Try: export GITHUB_TOKEN=\"ghp_your_token\""
fi
if prompt_for_github_token; then
header_args=(-H "Authorization: Bearer $GITHUB_TOKEN")
continue
fi fi
return 1 return 1
;; ;;
@@ -1199,16 +1114,9 @@ github_api_call() {
msg_warn "GitHub API rate limit, waiting ${retry_delay}s... (attempt $attempt/$max_retries)" msg_warn "GitHub API rate limit, waiting ${retry_delay}s... (attempt $attempt/$max_retries)"
sleep "$retry_delay" sleep "$retry_delay"
retry_delay=$((retry_delay * 2)) retry_delay=$((retry_delay * 2))
((attempt++))
continue continue
fi fi
msg_error "GitHub API rate limit exceeded (HTTP 403)." msg_error "GitHub API rate limit exceeded (HTTP 403)."
if prompt_for_github_token; then
header_args=(-H "Authorization: Bearer $GITHUB_TOKEN")
retry_delay=2
attempt=1
continue
fi
msg_error "To increase the limit, export a GitHub token before running the script:" msg_error "To increase the limit, export a GitHub token before running the script:"
msg_error " export GITHUB_TOKEN=\"ghp_your_token_here\"" msg_error " export GITHUB_TOKEN=\"ghp_your_token_here\""
return 1 return 1
@@ -1220,7 +1128,6 @@ github_api_call() {
000 | "") 000 | "")
if [[ $attempt -lt $max_retries ]]; then if [[ $attempt -lt $max_retries ]]; then
sleep "$retry_delay" sleep "$retry_delay"
((attempt++))
continue continue
fi fi
msg_error "GitHub API connection failed (no response)." msg_error "GitHub API connection failed (no response)."
@@ -1230,14 +1137,12 @@ github_api_call() {
*) *)
if [[ $attempt -lt $max_retries ]]; then if [[ $attempt -lt $max_retries ]]; then
sleep "$retry_delay" sleep "$retry_delay"
((attempt++))
continue continue
fi fi
msg_error "GitHub API call failed (HTTP $http_code)." msg_error "GitHub API call failed (HTTP $http_code)."
return 1 return 1
;; ;;
esac esac
((attempt++))
done done
msg_error "GitHub API call failed after ${max_retries} attempts: ${url}" msg_error "GitHub API call failed after ${max_retries} attempts: ${url}"
@@ -1827,13 +1732,6 @@ setup_deb822_repo() {
rm -f "$tmp_gpg" rm -f "$tmp_gpg"
return 1 return 1
} }
else
# Already binary — copy directly
cp -f "$tmp_gpg" "/etc/apt/keyrings/${name}.gpg" || {
msg_error "Failed to install GPG key for ${name}"
rm -f "$tmp_gpg"
return 1
}
fi fi
rm -f "$tmp_gpg" rm -f "$tmp_gpg"
chmod 644 "/etc/apt/keyrings/${name}.gpg" chmod 644 "/etc/apt/keyrings/${name}.gpg"
@@ -1979,47 +1877,6 @@ extract_version_from_json() {
fi fi
} }
# ------------------------------------------------------------------------------
# Get latest GitHub tag (for repos that only publish tags, not releases).
#
# Usage:
# get_latest_gh_tag "owner/repo" [prefix]
#
# Arguments:
# $1 - GitHub repo (owner/repo)
# $2 - Optional prefix filter (e.g., "v" to only match tags starting with "v")
#
# Returns:
# Latest tag name (stdout), or returns 1 on failure
# ------------------------------------------------------------------------------
get_latest_gh_tag() {
local repo="$1"
local prefix="${2:-}"
local temp_file
temp_file=$(mktemp)
if ! github_api_call "https://api.github.com/repos/${repo}/tags?per_page=50" "$temp_file"; then
rm -f "$temp_file"
return 1
fi
local tag=""
if [[ -n "$prefix" ]]; then
tag=$(jq -r --arg p "$prefix" '[.[] | select(.name | startswith($p))][0].name // empty' "$temp_file")
else
tag=$(jq -r '.[0].name // empty' "$temp_file")
fi
rm -f "$temp_file"
if [[ -z "$tag" ]]; then
msg_error "No tags found for ${repo}"
return 1
fi
echo "$tag"
}
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Get latest GitHub release version with fallback to tags # Get latest GitHub release version with fallback to tags
# Usage: get_latest_github_release "owner/repo" [strip_v] [include_prerelease] # Usage: get_latest_github_release "owner/repo" [strip_v] [include_prerelease]
@@ -2118,129 +1975,101 @@ verify_gpg_fingerprint() {
} }
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Fetches and deploys a GitHub tag-based source tarball. # Get latest GitHub tag for a repository.
# #
# Description: # Description:
# - Downloads the source tarball for a given tag from GitHub # - Queries the GitHub API for tags (not releases)
# - Extracts to the target directory # - Useful for repos that only create tags, not full releases
# - Writes the version to ~/.<app> # - Supports optional prefix filter and version-only extraction
# - Returns the latest tag name (printed to stdout)
# #
# Usage: # Usage:
# fetch_and_deploy_gh_tag "guacd" "apache/guacamole-server" # MONGO_VERSION=$(get_latest_gh_tag "mongodb/mongo-tools")
# fetch_and_deploy_gh_tag "guacd" "apache/guacamole-server" "latest" "/opt/guacamole-server" # LATEST=$(get_latest_gh_tag "owner/repo" "v") # only tags starting with "v"
# LATEST=$(get_latest_gh_tag "owner/repo" "" "true") # strip leading "v"
# #
# Arguments: # Arguments:
# $1 - App name (used for version file ~/.<app>) # $1 - GitHub repo (owner/repo)
# $2 - GitHub repo (owner/repo) # $2 - Tag prefix filter (optional, e.g. "v" or "100.")
# $3 - Tag version (default: "latest" → auto-detect via get_latest_gh_tag) # $3 - Strip prefix from result (optional, "true" to strip $2 prefix)
# $4 - Target directory (default: /opt/$app) #
# Returns:
# 0 on success (tag printed to stdout), 1 on failure
# #
# Notes: # Notes:
# - Supports CLEAN_INSTALL=1 to wipe target before extracting # - Skips tags containing "rc", "alpha", "beta", "dev", "test"
# - For repos that only publish tags, not GitHub Releases # - Sorts by version number (sort -V) to find the latest
# - Respects GITHUB_TOKEN for rate limiting
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
fetch_and_deploy_gh_tag() { get_latest_gh_tag() {
local app="$1" local repo="$1"
local repo="$2" local prefix="${2:-}"
local version="${3:-latest}" local strip_prefix="${3:-false}"
local target="${4:-/opt/$app}"
local app_lc=""
app_lc="$(echo "${app,,}" | tr -d ' ')"
local version_file="$HOME/.${app_lc}"
if [[ "$version" == "latest" ]]; then local header_args=()
version=$(get_latest_gh_tag "$repo") || { [[ -n "${GITHUB_TOKEN:-}" ]] && header_args=(-H "Authorization: Bearer $GITHUB_TOKEN")
msg_error "Failed to determine latest tag for ${repo}"
local http_code=""
http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o /tmp/gh_tags.json \
-H 'Accept: application/vnd.github+json' \
-H 'X-GitHub-Api-Version: 2022-11-28' \
"${header_args[@]}" \
"https://api.github.com/repos/${repo}/tags?per_page=100" 2>/dev/null) || true
if [[ "$http_code" == "401" ]]; then
msg_error "GitHub API authentication failed (HTTP 401)."
if [[ -n "${GITHUB_TOKEN:-}" ]]; then
msg_error "Your GITHUB_TOKEN appears to be invalid or expired."
else
msg_error "The repository may require authentication. Try: export GITHUB_TOKEN=\"ghp_your_token\""
fi
rm -f /tmp/gh_tags.json
return 1 return 1
}
fi fi
local current_version="" if [[ "$http_code" == "403" ]]; then
[[ -f "$version_file" ]] && current_version=$(<"$version_file") msg_error "GitHub API rate limit exceeded (HTTP 403)."
msg_error "To increase the limit, export a GitHub token before running the script:"
if [[ "$current_version" == "$version" ]]; then msg_error " export GITHUB_TOKEN=\"ghp_your_token_here\""
msg_ok "$app is already up-to-date ($version)" rm -f /tmp/gh_tags.json
return 0 return 1
fi fi
local tmpdir if [[ "$http_code" == "000" || -z "$http_code" ]]; then
tmpdir=$(mktemp -d) || return 1 msg_error "GitHub API connection failed (no response)."
local tarball_url="https://github.com/${repo}/archive/refs/tags/${version}.tar.gz" msg_error "Check your network/DNS: curl -sSL https://api.github.com/rate_limit"
local filename="${app_lc}-${version}.tar.gz" rm -f /tmp/gh_tags.json
msg_info "Fetching GitHub tag: ${app} (${version})"
download_file "$tarball_url" "$tmpdir/$filename" || {
msg_error "Download failed: $tarball_url"
rm -rf "$tmpdir"
return 1 return 1
}
mkdir -p "$target"
if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
rm -rf "${target:?}/"*
fi fi
tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || { if [[ "$http_code" != "200" ]] || [[ ! -s /tmp/gh_tags.json ]]; then
msg_error "Failed to extract tarball" msg_error "Unable to fetch tags for ${repo} (HTTP ${http_code})"
rm -rf "$tmpdir" rm -f /tmp/gh_tags.json
return 1 return 1
} fi
local unpack_dir local tags_json
unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1) tags_json=$(</tmp/gh_tags.json)
rm -f /tmp/gh_tags.json
shopt -s dotglob nullglob
cp -r "$unpack_dir"/* "$target/"
shopt -u dotglob nullglob
rm -rf "$tmpdir"
echo "$version" >"$version_file"
msg_ok "Deployed ${app} ${version} to ${target}"
return 0
}
# ------------------------------------------------------------------------------
# Checks for new GitHub tag (for repos without releases).
#
# Description:
# - Uses get_latest_gh_tag to fetch the latest tag
# - Compares it to a local cached version (~/.<app>)
# - If newer, sets global CHECK_UPDATE_RELEASE and returns 0
#
# Usage:
# if check_for_gh_tag "guacd" "apache/guacamole-server"; then
# fetch_and_deploy_gh_tag "guacd" "apache/guacamole-server" "/opt/guacamole-server"
# fi
#
# Notes:
# - For repos that only publish tags, not GitHub Releases
# - Same interface as check_for_gh_release
# ------------------------------------------------------------------------------
check_for_gh_tag() {
local app="$1"
local repo="$2"
local prefix="${3:-}"
local app_lc=""
app_lc="$(echo "${app,,}" | tr -d ' ')"
local current_file="$HOME/.${app_lc}"
msg_info "Checking for update: ${app}"
# Extract tag names, filter by prefix, exclude pre-release patterns, sort by version
local latest="" local latest=""
latest=$(get_latest_gh_tag "$repo" "$prefix") || return 1 latest=$(echo "$tags_json" | grep -oP '"name":\s*"\K[^"]+' |
{ [[ -n "$prefix" ]] && grep "^${prefix}" || cat; } |
grep -viE '(rc|alpha|beta|dev|test|preview|snapshot)' |
sort -V | tail -n1)
local current="" if [[ -z "$latest" ]]; then
[[ -f "$current_file" ]] && current="$(<"$current_file")" msg_warn "No matching tags found for ${repo}${prefix:+ (prefix: $prefix)}"
return 1
if [[ -z "$current" || "$current" != "$latest" ]]; then
CHECK_UPDATE_RELEASE="$latest"
msg_ok "Update available: ${app} ${current:-not installed}${latest}"
return 0
fi fi
msg_ok "No update available: ${app} (${latest})" if [[ "$strip_prefix" == "true" && -n "$prefix" ]]; then
return 1 latest="${latest#"$prefix"}"
fi
echo "$latest"
return 0
} }
# ============================================================================== # ==============================================================================
@@ -2292,35 +2121,6 @@ check_for_gh_release() {
# Try /latest endpoint for non-pinned versions (most efficient) # Try /latest endpoint for non-pinned versions (most efficient)
local releases_json="" http_code="" local releases_json="" http_code=""
# For pinned versions, query the specific release tag directly
if [[ -n "$pinned_version_in" ]]; then
http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o /tmp/gh_check.json \
-H 'Accept: application/vnd.github+json' \
-H 'X-GitHub-Api-Version: 2022-11-28' \
"${header_args[@]}" \
"https://api.github.com/repos/${source}/releases/tags/${pinned_version_in}" 2>/dev/null) || true
if [[ "$http_code" == "200" ]] && [[ -s /tmp/gh_check.json ]]; then
releases_json="[$(</tmp/gh_check.json)]"
elif [[ "$http_code" == "401" ]]; then
msg_error "GitHub API authentication failed (HTTP 401)."
if [[ -n "${GITHUB_TOKEN:-}" ]]; then
msg_error "Your GITHUB_TOKEN appears to be invalid or expired."
else
msg_error "The repository may require authentication. Try: export GITHUB_TOKEN=\"ghp_your_token\""
fi
rm -f /tmp/gh_check.json
return 1
elif [[ "$http_code" == "403" ]]; then
msg_error "GitHub API rate limit exceeded (HTTP 403)."
msg_error "To increase the limit, export a GitHub token before running the script:"
msg_error " export GITHUB_TOKEN=\"ghp_your_token_here\""
rm -f /tmp/gh_check.json
return 1
fi
rm -f /tmp/gh_check.json
fi
if [[ -z "$pinned_version_in" ]]; then if [[ -z "$pinned_version_in" ]]; then
http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o /tmp/gh_check.json \ http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o /tmp/gh_check.json \
-H 'Accept: application/vnd.github+json' \ -H 'Accept: application/vnd.github+json' \
@@ -2588,8 +2388,6 @@ check_for_codeberg_release() {
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
create_self_signed_cert() { create_self_signed_cert() {
local APP_NAME="${1:-${APPLICATION}}" local APP_NAME="${1:-${APPLICATION}}"
local HOSTNAME="$(hostname -f)"
local IP="$(hostname -I | awk '{print $1}')"
local APP_NAME_LC=$(echo "${APP_NAME,,}" | tr -d ' ') local APP_NAME_LC=$(echo "${APP_NAME,,}" | tr -d ' ')
local CERT_DIR="/etc/ssl/${APP_NAME_LC}" local CERT_DIR="/etc/ssl/${APP_NAME_LC}"
local CERT_KEY="${CERT_DIR}/${APP_NAME_LC}.key" local CERT_KEY="${CERT_DIR}/${APP_NAME_LC}.key"
@@ -2607,8 +2405,8 @@ create_self_signed_cert() {
mkdir -p "$CERT_DIR" mkdir -p "$CERT_DIR"
$STD openssl req -new -newkey rsa:2048 -days 365 -nodes -x509 \ $STD openssl req -new -newkey rsa:2048 -days 365 -nodes -x509 \
-subj "/CN=${HOSTNAME}" \ -subj "/CN=${APP_NAME}" \
-addext "subjectAltName=DNS:${HOSTNAME},DNS:localhost,IP:${IP},IP:127.0.0.1" \ -addext "subjectAltName=DNS:${APP_NAME}" \
-keyout "$CERT_KEY" \ -keyout "$CERT_KEY" \
-out "$CERT_CRT" || { -out "$CERT_CRT" || {
msg_error "Failed to create self-signed certificate" msg_error "Failed to create self-signed certificate"
@@ -2678,30 +2476,6 @@ function ensure_usr_local_bin_persist() {
fi fi
} }
# ------------------------------------------------------------------------------
# curl_download - Downloads a file with automatic retry and exponential backoff.
#
# Usage: curl_download <output_file> <url>
#
# Retries up to 5 times with increasing --max-time (60/120/240/480/960s).
# Returns 0 on success, 1 if all attempts fail.
# ------------------------------------------------------------------------------
function curl_download() {
local output="$1"
local url="$2"
local timeouts=(60 120 240 480 960)
for i in "${!timeouts[@]}"; do
if curl --connect-timeout 15 --max-time "${timeouts[$i]}" -fsSL -o "$output" "$url"; then
return 0
fi
if ((i < ${#timeouts[@]} - 1)); then
msg_warn "Download timed out after ${timeouts[$i]}s, retrying... (attempt $((i + 2))/${#timeouts[@]})"
fi
done
return 1
}
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# Downloads and deploys latest Codeberg release (source, binary, tarball, asset). # Downloads and deploys latest Codeberg release (source, binary, tarball, asset).
# #
@@ -2759,7 +2533,8 @@ function fetch_and_deploy_codeberg_release() {
local app_lc=$(echo "${app,,}" | tr -d ' ') local app_lc=$(echo "${app,,}" | tr -d ' ')
local version_file="$HOME/.${app_lc}" local version_file="$HOME/.${app_lc}"
local api_timeouts=(60 120 240) local api_timeout="--connect-timeout 10 --max-time 60"
local download_timeout="--connect-timeout 15 --max-time 900"
local current_version="" local current_version=""
[[ -f "$version_file" ]] && current_version=$(<"$version_file") [[ -f "$version_file" ]] && current_version=$(<"$version_file")
@@ -2799,7 +2574,7 @@ function fetch_and_deploy_codeberg_release() {
# Codeberg archive URL format: https://codeberg.org/{owner}/{repo}/archive/{tag}.tar.gz # Codeberg archive URL format: https://codeberg.org/{owner}/{repo}/archive/{tag}.tar.gz
local archive_url="https://codeberg.org/$repo/archive/${tag_name}.tar.gz" local archive_url="https://codeberg.org/$repo/archive/${tag_name}.tar.gz"
if curl_download "$tmpdir/$filename" "$archive_url"; then if curl $download_timeout -fsSL -o "$tmpdir/$filename" "$archive_url"; then
download_success=true download_success=true
fi fi
@@ -2846,18 +2621,16 @@ function fetch_and_deploy_codeberg_release() {
return 1 return 1
fi fi
local attempt=0 success=false resp http_code local max_retries=3 retry_delay=2 attempt=1 success=false resp http_code
while ((attempt < ${#api_timeouts[@]})); do while ((attempt <= max_retries)); do
resp=$(curl --connect-timeout 10 --max-time "${api_timeouts[$attempt]}" -fsSL -w "%{http_code}" -o /tmp/codeberg_rel.json "$api_url") && success=true && break resp=$(curl $api_timeout -fsSL -w "%{http_code}" -o /tmp/codeberg_rel.json "$api_url") && success=true && break
sleep "$retry_delay"
((attempt++)) ((attempt++))
if ((attempt < ${#api_timeouts[@]})); then
msg_warn "API request timed out after ${api_timeouts[$((attempt - 1))]}s, retrying... (attempt $((attempt + 1))/${#api_timeouts[@]})"
fi
done done
if ! $success; then if ! $success; then
msg_error "Failed to fetch release metadata from $api_url after ${#api_timeouts[@]} attempts" msg_error "Failed to fetch release metadata from $api_url after $max_retries attempts"
return 1 return 1
fi fi
@@ -2898,7 +2671,7 @@ function fetch_and_deploy_codeberg_release() {
# Codeberg archive URL format # Codeberg archive URL format
local archive_url="https://codeberg.org/$repo/archive/${tag_name}.tar.gz" local archive_url="https://codeberg.org/$repo/archive/${tag_name}.tar.gz"
if curl_download "$tmpdir/$filename" "$archive_url"; then if curl $download_timeout -fsSL -o "$tmpdir/$filename" "$archive_url"; then
download_success=true download_success=true
fi fi
@@ -2972,7 +2745,7 @@ function fetch_and_deploy_codeberg_release() {
fi fi
filename="${url_match##*/}" filename="${url_match##*/}"
curl_download "$tmpdir/$filename" "$url_match" || { curl $download_timeout -fsSL -o "$tmpdir/$filename" "$url_match" || {
msg_error "Download failed: $url_match" msg_error "Download failed: $url_match"
rm -rf "$tmpdir" rm -rf "$tmpdir"
return 1 return 1
@@ -3015,7 +2788,7 @@ function fetch_and_deploy_codeberg_release() {
} }
filename="${asset_url##*/}" filename="${asset_url##*/}"
curl_download "$tmpdir/$filename" "$asset_url" || { curl $download_timeout -fsSL -o "$tmpdir/$filename" "$asset_url" || {
msg_error "Download failed: $asset_url" msg_error "Download failed: $asset_url"
rm -rf "$tmpdir" rm -rf "$tmpdir"
return 1 return 1
@@ -3116,7 +2889,7 @@ function fetch_and_deploy_codeberg_release() {
local target_file="$app" local target_file="$app"
[[ "$use_filename" == "true" ]] && target_file="$filename" [[ "$use_filename" == "true" ]] && target_file="$filename"
curl_download "$target/$target_file" "$asset_url" || { curl $download_timeout -fsSL -o "$target/$target_file" "$asset_url" || {
msg_error "Download failed: $asset_url" msg_error "Download failed: $asset_url"
rm -rf "$tmpdir" rm -rf "$tmpdir"
return 1 return 1
@@ -3311,7 +3084,8 @@ function fetch_and_deploy_gh_release() {
local app_lc=$(echo "${app,,}" | tr -d ' ') local app_lc=$(echo "${app,,}" | tr -d ' ')
local version_file="$HOME/.${app_lc}" local version_file="$HOME/.${app_lc}"
local api_timeouts=(60 120 240) local api_timeout="--connect-timeout 10 --max-time 60"
local download_timeout="--connect-timeout 15 --max-time 900"
local current_version="" local current_version=""
[[ -f "$version_file" ]] && current_version=$(<"$version_file") [[ -f "$version_file" ]] && current_version=$(<"$version_file")
@@ -3331,37 +3105,18 @@ function fetch_and_deploy_gh_release() {
return 1 return 1
fi fi
local max_retries=${#api_timeouts[@]} retry_delay=2 attempt=1 success=false http_code local max_retries=3 retry_delay=2 attempt=1 success=false http_code
while ((attempt <= max_retries)); do while ((attempt <= max_retries)); do
http_code=$(curl --connect-timeout 10 --max-time "${api_timeouts[$((attempt - 1))]:-240}" -sSL -w "%{http_code}" -o /tmp/gh_rel.json "${header[@]}" "$api_url" 2>/dev/null) || true http_code=$(curl $api_timeout -sSL -w "%{http_code}" -o /tmp/gh_rel.json "${header[@]}" "$api_url" 2>/dev/null) || true
if [[ "$http_code" == "200" ]]; then if [[ "$http_code" == "200" ]]; then
success=true success=true
break break
elif [[ "$http_code" == "401" ]]; then
msg_error "GitHub API authentication failed (HTTP 401)."
if [[ -n "${GITHUB_TOKEN:-}" ]]; then
msg_error "Your GITHUB_TOKEN appears to be invalid or expired."
else
msg_error "The repository may require authentication."
fi
if prompt_for_github_token; then
header=(-H "Authorization: token $GITHUB_TOKEN")
continue
fi
break
elif [[ "$http_code" == "403" ]]; then elif [[ "$http_code" == "403" ]]; then
if ((attempt < max_retries)); then if ((attempt < max_retries)); then
msg_warn "GitHub API rate limit hit, retrying in ${retry_delay}s... (attempt $attempt/$max_retries)" msg_warn "GitHub API rate limit hit, retrying in ${retry_delay}s... (attempt $attempt/$max_retries)"
sleep "$retry_delay" sleep "$retry_delay"
retry_delay=$((retry_delay * 2)) retry_delay=$((retry_delay * 2))
else
msg_error "GitHub API rate limit exceeded (HTTP 403)."
if prompt_for_github_token; then
header=(-H "Authorization: token $GITHUB_TOKEN")
retry_delay=2
attempt=0
fi
fi fi
else else
sleep "$retry_delay" sleep "$retry_delay"
@@ -3370,10 +3125,21 @@ function fetch_and_deploy_gh_release() {
done done
if ! $success; then if ! $success; then
if [[ "$http_code" == "000" || -z "$http_code" ]]; then if [[ "$http_code" == "401" ]]; then
msg_error "GitHub API authentication failed (HTTP 401)."
if [[ -n "${GITHUB_TOKEN:-}" ]]; then
msg_error "Your GITHUB_TOKEN appears to be invalid or expired."
else
msg_error "The repository may require authentication. Try: export GITHUB_TOKEN=\"ghp_your_token\""
fi
elif [[ "$http_code" == "403" ]]; then
msg_error "GitHub API rate limit exceeded (HTTP 403)."
msg_error "To increase the limit, export a GitHub token before running the script:"
msg_error " export GITHUB_TOKEN=\"ghp_your_token_here\""
elif [[ "$http_code" == "000" || -z "$http_code" ]]; then
msg_error "GitHub API connection failed (no response)." msg_error "GitHub API connection failed (no response)."
msg_error "Check your network/DNS: curl -sSL https://api.github.com/rate_limit" msg_error "Check your network/DNS: curl -sSL https://api.github.com/rate_limit"
elif [[ "$http_code" != "401" ]]; then else
msg_error "Failed to fetch release metadata (HTTP $http_code)" msg_error "Failed to fetch release metadata (HTTP $http_code)"
fi fi
return 1 return 1
@@ -3408,7 +3174,7 @@ function fetch_and_deploy_gh_release() {
local direct_tarball_url="https://github.com/$repo/archive/refs/tags/$tag_name.tar.gz" local direct_tarball_url="https://github.com/$repo/archive/refs/tags/$tag_name.tar.gz"
filename="${app_lc}-${version_safe}.tar.gz" filename="${app_lc}-${version_safe}.tar.gz"
curl_download "$tmpdir/$filename" "$direct_tarball_url" || { curl $download_timeout -fsSL -o "$tmpdir/$filename" "$direct_tarball_url" || {
msg_error "Download failed: $direct_tarball_url" msg_error "Download failed: $direct_tarball_url"
rm -rf "$tmpdir" rm -rf "$tmpdir"
return 1 return 1
@@ -3511,7 +3277,7 @@ function fetch_and_deploy_gh_release() {
fi fi
filename="${url_match##*/}" filename="${url_match##*/}"
curl_download "$tmpdir/$filename" "$url_match" || { curl $download_timeout -fsSL -o "$tmpdir/$filename" "$url_match" || {
msg_error "Download failed: $url_match" msg_error "Download failed: $url_match"
rm -rf "$tmpdir" rm -rf "$tmpdir"
return 1 return 1
@@ -3578,7 +3344,7 @@ function fetch_and_deploy_gh_release() {
} }
filename="${asset_url##*/}" filename="${asset_url##*/}"
curl_download "$tmpdir/$filename" "$asset_url" || { curl $download_timeout -fsSL -o "$tmpdir/$filename" "$asset_url" || {
msg_error "Download failed: $asset_url" msg_error "Download failed: $asset_url"
rm -rf "$tmpdir" rm -rf "$tmpdir"
return 1 return 1
@@ -3699,7 +3465,7 @@ function fetch_and_deploy_gh_release() {
local target_file="$app" local target_file="$app"
[[ "$use_filename" == "true" ]] && target_file="$filename" [[ "$use_filename" == "true" ]] && target_file="$filename"
curl_download "$target/$target_file" "$asset_url" || { curl $download_timeout -fsSL -o "$target/$target_file" "$asset_url" || {
msg_error "Download failed: $asset_url" msg_error "Download failed: $asset_url"
rm -rf "$tmpdir" rm -rf "$tmpdir"
return 1 return 1
@@ -4256,8 +4022,6 @@ function setup_gs() {
# - NVIDIA requires matching host driver version # - NVIDIA requires matching host driver version
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
function setup_hwaccel() { function setup_hwaccel() {
local service_user="${1:-}"
# Check if user explicitly disabled GPU in advanced settings # Check if user explicitly disabled GPU in advanced settings
# ENABLE_GPU is exported from build.func # ENABLE_GPU is exported from build.func
if [[ "${ENABLE_GPU:-no}" == "no" ]]; then if [[ "${ENABLE_GPU:-no}" == "no" ]]; then
@@ -4509,7 +4273,7 @@ function setup_hwaccel() {
# ═══════════════════════════════════════════════════════════════════════════ # ═══════════════════════════════════════════════════════════════════════════
# Device Permissions # Device Permissions
# ═══════════════════════════════════════════════════════════════════════════ # ═══════════════════════════════════════════════════════════════════════════
_setup_gpu_permissions "$in_ct" "$service_user" _setup_gpu_permissions "$in_ct"
cache_installed_version "hwaccel" "1.0" cache_installed_version "hwaccel" "1.0"
msg_ok "Setup Hardware Acceleration" msg_ok "Setup Hardware Acceleration"
@@ -4676,8 +4440,9 @@ _setup_amd_gpu() {
fi fi
# Ubuntu includes AMD firmware in linux-firmware by default # Ubuntu includes AMD firmware in linux-firmware by default
# ROCm compute stack (OpenCL + HIP) # ROCm for compute (optional - large download)
_setup_rocm "$os_id" "$os_codename" # Uncomment if needed:
# $STD apt -y install rocm-opencl-runtime 2>/dev/null || true
msg_ok "AMD GPU configured" msg_ok "AMD GPU configured"
} }
@@ -4705,109 +4470,6 @@ _setup_amd_apu() {
msg_ok "AMD APU configured" msg_ok "AMD APU configured"
} }
# ══════════════════════════════════════════════════════════════════════════════
# AMD ROCm Compute Setup
# Adds ROCm repository and installs the ROCm compute stack for AMD GPUs/APUs.
# Provides: OpenCL, HIP, rocm-smi, rocminfo
# Supported: Debian 12/13, Ubuntu 22.04/24.04 (amd64 only)
# ══════════════════════════════════════════════════════════════════════════════
_setup_rocm() {
local os_id="$1" os_codename="$2"
# Only amd64 is supported
if [[ "$(dpkg --print-architecture 2>/dev/null)" != "amd64" ]]; then
msg_warn "ROCm is only available for amd64 — skipping"
return 0
fi
local ROCM_VERSION="7.2"
local ROCM_REPO_CODENAME
# Map OS codename to ROCm repository codename (Ubuntu-based repos)
case "${os_id}-${os_codename}" in
debian-bookworm) ROCM_REPO_CODENAME="jammy" ;;
debian-trixie | debian-sid) ROCM_REPO_CODENAME="noble" ;;
ubuntu-jammy) ROCM_REPO_CODENAME="jammy" ;;
ubuntu-noble) ROCM_REPO_CODENAME="noble" ;;
*)
msg_warn "ROCm not supported on ${os_id} ${os_codename} — skipping"
return 0
;;
esac
msg_info "Installing ROCm ${ROCM_VERSION} compute stack"
# ROCm main repository (userspace compute libs)
setup_deb822_repo \
"rocm" \
"https://repo.radeon.com/rocm/rocm.gpg.key" \
"https://repo.radeon.com/rocm/apt/${ROCM_VERSION}" \
"${ROCM_REPO_CODENAME}" \
"main" \
"amd64" || {
msg_warn "Failed to add ROCm repository — skipping ROCm"
return 0
}
# Note: The amdgpu/latest/ubuntu repo (kernel driver packages) is intentionally
# omitted — kernel drivers are managed by the Proxmox host, not the LXC container.
# Only the ROCm userspace compute stack is needed inside the container.
# Pin ROCm packages to prefer radeon repo
cat <<EOF >/etc/apt/preferences.d/rocm-pin-600
Package: *
Pin: release o=repo.radeon.com
Pin-Priority: 600
EOF
# apt update with retry — repo.radeon.com CDN can be mid-sync (transient size mismatches).
# Run with ERR trap disabled so a transient failure does not abort the entire install.
local _apt_ok=0
for _attempt in 1 2 3; do
if (
set +e
apt-get update -qq 2>&1
exit $?
) 2>/dev/null; then
_apt_ok=1
break
fi
msg_warn "apt update failed (attempt ${_attempt}/3) — AMD repo may be temporarily unavailable, retrying in 30s…"
sleep 30
done
if [[ $_apt_ok -eq 0 ]]; then
msg_warn "apt update still failing after 3 attempts — skipping ROCm install"
return 0
fi
# Install only runtime packages — full 'rocm' meta-package includes 15GB+ dev tools
$STD apt install -y rocm-opencl-runtime rocm-hip-runtime rocm-smi-lib 2>/dev/null || {
msg_warn "ROCm runtime install failed — trying minimal set"
$STD apt install -y rocm-opencl-runtime rocm-smi-lib 2>/dev/null || msg_warn "ROCm minimal install also failed"
}
# Group membership for GPU access
usermod -aG render,video root 2>/dev/null || true
# Environment (PATH + LD_LIBRARY_PATH)
if [[ -d /opt/rocm ]]; then
cat <<'ENVEOF' >/etc/profile.d/rocm.sh
export PATH="$PATH:/opt/rocm/bin"
export LD_LIBRARY_PATH="${LD_LIBRARY_PATH:+$LD_LIBRARY_PATH:}/opt/rocm/lib"
ENVEOF
chmod +x /etc/profile.d/rocm.sh
# Also make available for current session / systemd services
echo "/opt/rocm/lib" >/etc/ld.so.conf.d/rocm.conf
ldconfig 2>/dev/null || true
fi
if [[ -x /opt/rocm/bin/rocminfo ]]; then
msg_ok "ROCm ${ROCM_VERSION} installed"
else
msg_warn "ROCm installed but rocminfo not found — GPU may not be available in container"
fi
}
# ══════════════════════════════════════════════════════════════════════════════ # ══════════════════════════════════════════════════════════════════════════════
# NVIDIA GPU Setup # NVIDIA GPU Setup
# ══════════════════════════════════════════════════════════════════════════════ # ══════════════════════════════════════════════════════════════════════════════
@@ -4824,10 +4486,10 @@ _setup_nvidia_gpu() {
# Format varies by driver type: # Format varies by driver type:
# Proprietary: "NVRM version: NVIDIA UNIX x86_64 Kernel Module 550.54.14 Thu..." # Proprietary: "NVRM version: NVIDIA UNIX x86_64 Kernel Module 550.54.14 Thu..."
# Open: "NVRM version: NVIDIA UNIX Open Kernel Module for x86_64 590.48.01 Release..." # Open: "NVRM version: NVIDIA UNIX Open Kernel Module for x86_64 590.48.01 Release..."
# Use regex to extract version number (###.##.## or ###.## pattern) # Use regex to extract version number (###.##.## pattern)
local nvidia_host_version="" local nvidia_host_version=""
if [[ -f /proc/driver/nvidia/version ]]; then if [[ -f /proc/driver/nvidia/version ]]; then
nvidia_host_version=$(grep -oP '\d{3,}\.\d+(\.\d+)?' /proc/driver/nvidia/version 2>/dev/null | head -1) nvidia_host_version=$(grep -oP '\d{3,}\.\d+\.\d+' /proc/driver/nvidia/version 2>/dev/null | head -1)
fi fi
if [[ -z "$nvidia_host_version" ]]; then if [[ -z "$nvidia_host_version" ]]; then
@@ -5143,7 +4805,6 @@ EOF
# ══════════════════════════════════════════════════════════════════════════════ # ══════════════════════════════════════════════════════════════════════════════
_setup_gpu_permissions() { _setup_gpu_permissions() {
local in_ct="$1" local in_ct="$1"
local service_user="${2:-}"
# /dev/dri permissions (Intel/AMD) # /dev/dri permissions (Intel/AMD)
if [[ "$in_ct" == "0" && -d /dev/dri ]]; then if [[ "$in_ct" == "0" && -d /dev/dri ]]; then
@@ -5210,12 +4871,6 @@ _setup_gpu_permissions() {
chmod 666 /dev/kfd 2>/dev/null || true chmod 666 /dev/kfd 2>/dev/null || true
msg_info "AMD ROCm compute device configured" msg_info "AMD ROCm compute device configured"
fi fi
# Add service user to render and video groups for GPU hardware acceleration
if [[ -n "$service_user" ]]; then
$STD usermod -aG render "$service_user" 2>/dev/null || true
$STD usermod -aG video "$service_user" 2>/dev/null || true
fi
} }
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
@@ -5487,7 +5142,7 @@ current_ip="$(get_current_ip)"
if [[ -z "$current_ip" ]]; then if [[ -z "$current_ip" ]]; then
echo "[ERROR] Could not detect local IP" >&2 echo "[ERROR] Could not detect local IP" >&2
exit 123 exit 1
fi fi
if [[ -f "$IP_FILE" ]]; then if [[ -f "$IP_FILE" ]]; then
@@ -5988,20 +5643,20 @@ function setup_mongodb() {
# - Handles Debian Trixie libaio1t64 transition # - Handles Debian Trixie libaio1t64 transition
# #
# Variables: # Variables:
# USE_MYSQL_REPO - Use official MySQL repository (default: true) # USE_MYSQL_REPO - Set to "true" to use official MySQL repository
# Set to "false" to use distro packages instead # (default: false, uses distro packages)
# MYSQL_VERSION - MySQL version to install when using official repo # MYSQL_VERSION - MySQL version to install when using official repo
# (e.g. 8.0, 8.4) (default: 8.0) # (e.g. 8.0, 8.4) (default: 8.0)
# #
# Examples: # Examples:
# setup_mysql # Uses official MySQL repo, 8.0 # setup_mysql # Uses distro package (recommended)
# MYSQL_VERSION="8.4" setup_mysql # Specific version from MySQL repo # USE_MYSQL_REPO=true setup_mysql # Uses official MySQL repo
# USE_MYSQL_REPO=false setup_mysql # Uses distro package instead # USE_MYSQL_REPO=true MYSQL_VERSION="8.4" setup_mysql # Specific version
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
function setup_mysql() { function setup_mysql() {
local MYSQL_VERSION="${MYSQL_VERSION:-8.0}" local MYSQL_VERSION="${MYSQL_VERSION:-8.0}"
local USE_MYSQL_REPO="${USE_MYSQL_REPO:-true}" local USE_MYSQL_REPO="${USE_MYSQL_REPO:-false}"
local DISTRO_ID DISTRO_CODENAME local DISTRO_ID DISTRO_CODENAME
DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"') DISTRO_ID=$(awk -F= '/^ID=/{print $2}' /etc/os-release | tr -d '"')
DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release) DISTRO_CODENAME=$(awk -F= '/^VERSION_CODENAME=/{print $2}' /etc/os-release)

View File

@@ -2,7 +2,7 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# Author: MickLesk (CanbiZ) # Author: MickLesk (CanbiZ)
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/LICENSE # License: MIT | https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/LICENSE
# ============================================================================== # ==============================================================================
# VM-APP.FUNC - DEPLOY LXC APPLICATIONS INSIDE VIRTUAL MACHINES # VM-APP.FUNC - DEPLOY LXC APPLICATIONS INSIDE VIRTUAL MACHINES

View File

@@ -1,5 +1,5 @@
# Copyright (c) 2021-2026 community-scripts ORG # Copyright (c) 2021-2026 community-scripts ORG
# License: MIT | https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/LICENSE # License: MIT | https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/LICENSE
set -euo pipefail set -euo pipefail
SPINNER_PID="" SPINNER_PID=""
@@ -35,7 +35,7 @@ load_functions() {
get_header() { get_header() {
local app_name=$(echo "${APP,,}" | tr ' ' '-') local app_name=$(echo "${APP,,}" | tr ' ' '-')
local app_type=${APP_TYPE:-vm} local app_type=${APP_TYPE:-vm}
local header_url="https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/${app_type}/headers/${app_name}" local header_url="https://git.community-scripts.org/community-scripts/ProxmoxVED/raw/branch/main/${app_type}/headers/${app_name}"
local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}" local local_header_path="/usr/local/community-scripts/headers/${app_type}/${app_name}"
mkdir -p "$(dirname "$local_header_path")" mkdir -p "$(dirname "$local_header_path")"