Files
LuHost/utils/contentdb.js
Nathan Schneider 2d3b1166fe Fix server management issues and improve overall stability
Major server management fixes:
- Replace Flatpak-specific pkill with universal process tree termination using pstree + process.kill()
- Fix signal format errors (SIGTERM/SIGKILL instead of TERM/KILL strings)
- Add 5-second cooldown after server stop to prevent race conditions with external detection
- Enable Stop Server button for external servers in UI
- Implement proper timeout handling with process tree killing

ContentDB improvements:
- Fix download retry logic and "closed" error by preventing concurrent zip extraction
- Implement smart root directory detection and stripping during package extraction
- Add game-specific timeout handling (8s for VoxeLibre vs 3s for simple games)

World creation fixes:
- Make world creation asynchronous to prevent browser hangs
- Add WebSocket notifications for world creation completion status

Other improvements:
- Remove excessive debug logging
- Improve error handling and user feedback throughout the application
- Clean up temporary files and unnecessary logging

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-08-24 19:17:38 -06:00

475 lines
16 KiB
JavaScript

const axios = require('axios');
const fs = require('fs').promises;
const path = require('path');
const archiver = require('archiver');
const yauzl = require('yauzl');
const { promisify } = require('util');
class ContentDBClient {
  /**
   * HTTP client for the Luanti ContentDB API (https://content.luanti.org).
   *
   * Provides package search, metadata/release lookup, download with retry,
   * zip extraction (sequential, with common-root stripping and zip-slip
   * protection), installed-package update checks, and dependency-aware
   * installation.
   */
  constructor() {
    this.baseURL = 'https://content.luanti.org/api';
    this.client = axios.create({
      baseURL: this.baseURL,
      timeout: 30000, // metadata calls only; downloads use their own axios call below
      headers: {
        'User-Agent': 'LuHost/1.0',
        'Accept': 'application/json'
      },
      // Treat 2xx as success, but let 404 RESOLVE instead of throwing so
      // callers can translate it into a domain-specific "not found" error.
      // NOTE: because of this, 404s must be detected via response.status,
      // not via error.response in a catch block.
      validateStatus: (status) => {
        return (status >= 200 && status < 300) || status === 404;
      }
    });
  }

  /**
   * Search packages (mods, games, texture packs).
   *
   * @param {string} query  Free-text search term.
   * @param {string} type   '' (any), 'mod', 'game', or 'txp' (texture pack).
   * @param {string} sort   score | name | created_at | approved_at | downloads.
   * @param {string} order  'asc' | 'desc'.
   * @param {number} limit  Page size; clamped to the API maximum of 50.
   * @param {number} offset Pagination offset.
   * @returns {Promise<object>} Raw search response body from ContentDB.
   * @throws {Error} Wrapped network/API failure.
   */
  async searchPackages(query = '', type = '', sort = 'score', order = 'desc', limit = 20, offset = 0) {
    try {
      const params = {
        q: query,
        type: type, // mod, game, txp (texture pack)
        sort: sort, // score, name, created_at, approved_at, downloads
        order: order, // asc, desc
        limit: Math.min(limit, 50), // API limit
        offset: offset
      };
      const response = await this.client.get('/packages/', { params });
      // validateStatus lets 404 resolve without throwing; surface it here
      // rather than returning an error body to the caller.
      if (response.status === 404) {
        throw new Error('Search endpoint returned 404');
      }
      return response.data;
    } catch (error) {
      throw new Error(`Failed to search ContentDB: ${error.message}`);
    }
  }

  /**
   * Get full package details.
   *
   * @param {string} author Package author (ContentDB username).
   * @param {string} name   Package name.
   * @returns {Promise<object>} Package metadata.
   * @throws {Error} 'Package not found' on 404, or a wrapped failure message.
   */
  async getPackage(author, name) {
    try {
      const response = await this.client.get(`/packages/${author}/${name}/`);
      // Bug fix: validateStatus allows 404 to resolve, so the old
      // error.response?.status === 404 check in the catch block was dead
      // code and missing packages leaked the API's error body to callers.
      if (response.status === 404) {
        throw new Error('Package not found');
      }
      // Ensure we got JSON back (typeof null === 'object', so check both)
      if (typeof response.data !== 'object' || response.data === null) {
        throw new Error('Invalid response format from ContentDB');
      }
      return response.data;
    } catch (error) {
      // Pass our own domain error through without re-wrapping it.
      if (error.message === 'Package not found') {
        throw error;
      }
      if (error.response?.status === 404) {
        throw new Error('Package not found');
      }
      // Handle cases where the response isn't JSON
      if (error.message.includes('JSON') || error.message.includes('Unexpected token')) {
        throw new Error('ContentDB returned invalid data format');
      }
      throw new Error(`Failed to get package details: ${error.message}`);
    }
  }

  /**
   * Get the release list for a package (newest first, per the API).
   *
   * @param {string} author Package author.
   * @param {string} name   Package name.
   * @returns {Promise<Array<object>>} Release descriptors.
   * @throws {Error} Wrapped failure (including 404 for unknown packages).
   */
  async getPackageReleases(author, name) {
    try {
      const response = await this.client.get(`/packages/${author}/${name}/releases/`);
      // See getPackage: 404 resolves rather than throws, so check it here.
      if (response.status === 404) {
        throw new Error('Package not found');
      }
      return response.data;
    } catch (error) {
      throw new Error(`Failed to get package releases: ${error.message}`);
    }
  }

  /**
   * Download (and, for zip releases, extract) a package into targetPath.
   *
   * Retries the download up to 3 times with exponential backoff (1s/2s/4s)
   * to work around transient connection resets from the CDN.
   *
   * @param {string} author     Package author.
   * @param {string} name       Package name.
   * @param {string} targetPath Directory to install into (created if missing).
   * @param {?(number|string)} version Release id or title; latest if null.
   * @returns {Promise<{package: object, release: object, downloadPath: string}>}
   * @throws {Error} Wrapped failure from any stage (lookup, download, extract).
   */
  async downloadPackage(author, name, targetPath, version = null) {
    try {
      // Get package info first
      const packageInfo = await this.getPackage(author, name);
      // Get releases to find download URL
      const releases = await this.getPackageReleases(author, name);
      if (!releases || releases.length === 0) {
        throw new Error('No releases found for this package');
      }
      // Find the specified version or use the latest
      let release;
      if (version) {
        release = releases.find(r => r.id === version || r.title === version);
        if (!release) {
          throw new Error(`Version ${version} not found`);
        }
      } else {
        // Use the first release (should be latest)
        release = releases[0];
      }
      if (!release.url) {
        throw new Error('No download URL found for this release');
      }
      // Construct full download URL if needed (API returns site-relative paths)
      let downloadUrl = release.url;
      if (downloadUrl.startsWith('/')) {
        downloadUrl = 'https://content.luanti.org' + downloadUrl;
      }
      // Download the package with retry logic
      let downloadResponse;
      let retryCount = 0;
      const maxRetries = 3;
      while (retryCount <= maxRetries) {
        try {
          console.log(`ContentDB: Attempting download from ${downloadUrl} (attempt ${retryCount + 1}/${maxRetries + 1})`);
          downloadResponse = await axios.get(downloadUrl, {
            responseType: 'stream',
            timeout: 60000, // 1 minute timeout per attempt
            headers: {
              'User-Agent': 'LuHost/1.0',
              'Accept': '*/*',
              'Connection': 'keep-alive'
            },
            // Increase buffer limits to handle larger downloads
            maxContentLength: 100 * 1024 * 1024, // 100MB
            maxBodyLength: 100 * 1024 * 1024
          });
          break; // Success, exit retry loop
        } catch (downloadError) {
          retryCount++;
          console.warn(`ContentDB: Download attempt ${retryCount} failed:`, downloadError.message);
          if (retryCount > maxRetries) {
            // All retries exhausted
            const errorMsg = downloadError.code === 'ECONNRESET' || downloadError.message.includes('closed')
              ? 'Connection was closed by the server. This may be due to network issues or server load. Please try again later.'
              : `Download failed: ${downloadError.message}`;
            throw new Error(errorMsg);
          }
          // Wait before retrying (exponential backoff)
          const delayMs = Math.pow(2, retryCount - 1) * 1000; // 1s, 2s, 4s
          console.log(`ContentDB: Retrying in ${delayMs}ms...`);
          await new Promise(resolve => setTimeout(resolve, delayMs));
        }
      }
      // Create target directory
      await fs.mkdir(targetPath, { recursive: true });
      // If it's a zip file, extract it
      if (release.url.endsWith('.zip')) {
        const tempZipPath = path.join(targetPath, 'temp.zip');
        // Save zip file temporarily (must be fully on disk before yauzl opens it)
        const writer = require('fs').createWriteStream(tempZipPath);
        downloadResponse.data.pipe(writer);
        await new Promise((resolve, reject) => {
          writer.on('finish', resolve);
          writer.on('error', reject);
        });
        // Extract zip file
        try {
          await this.extractZipFile(tempZipPath, targetPath);
          console.log(`ContentDB: Successfully extracted zip to ${targetPath}`);
        } catch (extractError) {
          console.error(`ContentDB: Extraction failed:`, extractError);
          // Clean up temp file before rethrowing
          try {
            await fs.unlink(tempZipPath);
          } catch (cleanupError) {
            console.warn(`ContentDB: Failed to cleanup temp file:`, cleanupError.message);
          }
          throw extractError;
        }
        // Remove temp zip file
        try {
          await fs.unlink(tempZipPath);
          console.log(`ContentDB: Cleaned up temp zip file`);
        } catch (cleanupError) {
          console.warn(`ContentDB: Failed to remove temp zip file:`, cleanupError.message);
          // Don't throw - extraction succeeded, cleanup failure is not critical
        }
      } else {
        // For non-zip files, save directly
        const fileName = path.basename(release.url) || 'download';
        const filePath = path.join(targetPath, fileName);
        const writer = require('fs').createWriteStream(filePath);
        downloadResponse.data.pipe(writer);
        await new Promise((resolve, reject) => {
          writer.on('finish', resolve);
          writer.on('error', reject);
        });
      }
      return {
        package: packageInfo,
        release: release,
        downloadPath: targetPath
      };
    } catch (error) {
      throw new Error(`Failed to download package: ${error.message}`);
    }
  }

  /**
   * Extract a zip archive into targetPath.
   *
   * Two passes over the archive: the first collects every entry so a common
   * top-level directory can be detected (and stripped, so packages zipped as
   * "pkgname/..." land directly in targetPath); the second reopens the zip
   * and extracts entries strictly one at a time — sequential extraction
   * avoids yauzl "closed" errors from concurrent read streams.
   *
   * @param {string} zipPath    Path of the zip file to read.
   * @param {string} targetPath Directory to extract into (must already exist).
   * @returns {Promise<void>}
   */
  async extractZipFile(zipPath, targetPath) {
    return new Promise((resolve, reject) => {
      yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
        if (err) {
          reject(err);
          return;
        }
        const entries = [];
        // First pass: collect all entries to analyze structure
        zipfile.on('entry', (entry) => {
          entries.push(entry);
          zipfile.readEntry();
        });
        zipfile.on('end', async () => {
          try {
            // Analyze if we have a common root directory that should be stripped
            let commonRoot = null;
            let shouldStripRoot = false;
            if (entries.length > 0) {
              // Only real files (not directory markers) determine the layout
              const fileEntries = entries.filter(e => !e.fileName.endsWith('/') && e.fileName.trim() !== '');
              if (fileEntries.length > 0) {
                // Check if all files are in the same top-level directory
                const firstPath = fileEntries[0].fileName;
                const firstSlash = firstPath.indexOf('/');
                if (firstSlash > 0) {
                  const potentialRoot = firstPath.substring(0, firstSlash);
                  // Strip only if EVERY file lives under the same root directory
                  const allInSameRoot = fileEntries.every(entry =>
                    entry.fileName.startsWith(potentialRoot + '/')
                  );
                  if (allInSameRoot) {
                    commonRoot = potentialRoot;
                    shouldStripRoot = true;
                    console.log(`ContentDB: Detected common root directory "${commonRoot}", will strip it during extraction`);
                  }
                }
              }
            }
            zipfile.close();
            // Second pass: reopen zip and extract files sequentially
            yauzl.open(zipPath, { lazyEntries: true }, (reopenErr, newZipfile) => {
              if (reopenErr) {
                reject(reopenErr);
                return;
              }
              let entryIndex = 0;
              const processNextEntry = () => {
                if (entryIndex >= entries.length) {
                  newZipfile.close();
                  resolve();
                  return;
                }
                const entry = entries[entryIndex++];
                let fileName = entry.fileName;
                // Strip common root if detected
                if (shouldStripRoot && commonRoot) {
                  if (fileName === commonRoot || fileName === commonRoot + '/') {
                    processNextEntry(); // Skip the root directory itself
                    return;
                  }
                  if (fileName.startsWith(commonRoot + '/')) {
                    fileName = fileName.substring(commonRoot.length + 1);
                  }
                }
                // Skip empty filenames
                if (!fileName || fileName.trim() === '') {
                  processNextEntry();
                  return;
                }
                const entryPath = path.join(targetPath, fileName);
                const normalizedPath = path.normalize(entryPath);
                // Zip-slip guard: a plain startsWith prefix check is unsafe
                // ("/target-evil" starts with "/target"); use path.relative
                // and reject anything that escapes targetPath.
                const relToTarget = path.relative(path.normalize(targetPath), normalizedPath);
                if (relToTarget === '' || relToTarget.startsWith('..') || path.isAbsolute(relToTarget)) {
                  processNextEntry();
                  return;
                }
                if (fileName.endsWith('/')) {
                  // Directory entry
                  fs.mkdir(normalizedPath, { recursive: true })
                    .then(() => processNextEntry())
                    .catch(reject);
                } else {
                  // File entry
                  newZipfile.openReadStream(entry, async (streamErr, readStream) => {
                    if (streamErr) {
                      newZipfile.close();
                      reject(streamErr);
                      return;
                    }
                    try {
                      // Ensure parent directory exists
                      const parentDir = path.dirname(normalizedPath);
                      await fs.mkdir(parentDir, { recursive: true });
                      const writeStream = require('fs').createWriteStream(normalizedPath);
                      readStream.pipe(writeStream);
                      writeStream.on('finish', () => {
                        processNextEntry();
                      });
                      writeStream.on('error', (writeError) => {
                        newZipfile.close();
                        reject(writeError);
                      });
                    } catch (mkdirError) {
                      newZipfile.close();
                      reject(mkdirError);
                    }
                  });
                }
              };
              newZipfile.on('error', (zipError) => {
                newZipfile.close();
                reject(zipError);
              });
              processNextEntry();
            });
          } catch (error) {
            zipfile.close();
            reject(error);
          }
        });
        zipfile.on('error', (error) => {
          zipfile.close();
          reject(error);
        });
        zipfile.readEntry();
      });
    });
  }

  /**
   * Get the most-downloaded packages.
   * @param {string} type  '' (any), 'mod', 'game', or 'txp'.
   * @param {number} limit Number of results.
   */
  async getPopularPackages(type = '', limit = 10) {
    return this.searchPackages('', type, 'downloads', 'desc', limit, 0);
  }

  /**
   * Get the most recently approved/updated packages.
   * @param {string} type  '' (any), 'mod', 'game', or 'txp'.
   * @param {number} limit Number of results.
   */
  async getRecentPackages(type = '', limit = 10) {
    return this.searchPackages('', type, 'approved_at', 'desc', limit, 0);
  }

  /**
   * Check ContentDB for updates to locally installed packages.
   *
   * Matching is best-effort: each local package name is searched on
   * ContentDB and matched case-insensitively against result name/title.
   * Packages that cannot be matched or checked are skipped with a warning.
   *
   * @param {Array<{name: string}>} installedPackages Local package records.
   * @returns {Promise<Array<object>>} One entry per matched package with its
   *   local record, remote match, and latest release.
   */
  async checkForUpdates(installedPackages) {
    const updates = [];
    for (const pkg of installedPackages) {
      try {
        // Matching local package names to ContentDB packages is not always
        // straightforward; for now use a basic search-based approach.
        const searchResults = await this.searchPackages(pkg.name, '', 'score', 'desc', 5);
        if (Array.isArray(searchResults) && searchResults.length > 0) {
          const wanted = pkg.name.toLowerCase();
          // Try to find exact match; results may lack name/title, so guard
          // with optional chaining instead of risking a TypeError.
          const match = searchResults.find(result =>
            result.name?.toLowerCase() === wanted ||
            result.title?.toLowerCase() === wanted
          );
          if (match) {
            const releases = await this.getPackageReleases(match.author, match.name);
            if (releases && releases.length > 0) {
              updates.push({
                local: pkg,
                remote: match,
                latestRelease: releases[0],
                hasUpdate: true // We could implement version comparison here
              });
            }
          }
        }
      } catch (error) {
        // Skip packages that can't be found or checked
        console.warn(`Could not check updates for ${pkg.name}:`, error.message);
      }
    }
    return updates;
  }

  /**
   * Get a package's declared dependencies.
   *
   * @param {string} author Package author.
   * @param {string} name   Package name.
   * @returns {Promise<{hard_dependencies: Array, optional_dependencies: Array}>}
   *   Empty arrays when the package declares none.
   * @throws {Error} Wrapped lookup failure.
   */
  async getPackageDependencies(author, name) {
    try {
      const packageInfo = await this.getPackage(author, name);
      return {
        hard_dependencies: packageInfo.hard_dependencies || [],
        optional_dependencies: packageInfo.optional_dependencies || []
      };
    } catch (error) {
      throw new Error(`Failed to get dependencies: ${error.message}`);
    }
  }

  /**
   * Install a package and (optionally) its hard dependencies.
   *
   * Never throws: failures are collected in the returned `errors` array so
   * partial installs can be reported to the user.
   *
   * @param {string} author         Package author.
   * @param {string} name           Package name.
   * @param {string} targetBasePath Directory under which each package gets
   *                                its own subdirectory.
   * @param {boolean} resolveDeps   Also install hard dependencies.
   * @returns {Promise<{main: ?object, dependencies: Array, errors: string[]}>}
   */
  async installPackageWithDeps(author, name, targetBasePath, resolveDeps = true) {
    const installResults = {
      main: null,
      dependencies: [],
      errors: []
    };
    try {
      // Install main package
      const mainPackagePath = path.join(targetBasePath, name);
      const mainResult = await this.downloadPackage(author, name, mainPackagePath);
      installResults.main = mainResult;
      // Install dependencies if requested; one failing dependency must not
      // abort the rest, so each is wrapped individually.
      if (resolveDeps) {
        const deps = await this.getPackageDependencies(author, name);
        for (const dep of deps.hard_dependencies) {
          try {
            const depPath = path.join(targetBasePath, dep.name);
            const depResult = await this.downloadPackage(dep.author, dep.name, depPath);
            installResults.dependencies.push(depResult);
          } catch (error) {
            installResults.errors.push(`Failed to install dependency ${dep.name}: ${error.message}`);
          }
        }
      }
      return installResults;
    } catch (error) {
      installResults.errors.push(error.message);
      return installResults;
    }
  }
}
// Export the class itself (not a singleton); consumers construct their own client.
module.exports = ContentDBClient;