diff --git a/index.mjs b/index.mjs
index 80d049f7..5ae1d1cd 100644
--- a/index.mjs
+++ b/index.mjs
@@ -1,48 +1,29 @@
-import cluster from "cluster";
-import os from "os";
-import net from "net";
-import fs from "fs";
-import path from "path";
-import { spawnSync } from "child_process";
-import express from "express";
-import { createServer } from "http";
-import compression from "compression";
-import WebSocket from "ws";
-import { LRUCache } from "lru-cache";
-import { baremuxPath } from "@mercuryworkshop/bare-mux/node";
-import { epoxyPath } from "@mercuryworkshop/epoxy-transport";
-import { libcurlPath } from "@mercuryworkshop/libcurl-transport";
-import { uvPath } from "@titaniumnetwork-dev/ultraviolet";
-import wisp from "wisp-server-node";
+import cluster from 'cluster';
+import compression from 'compression';
+import express from 'express';
+import fs from 'fs';
+import { createServer } from 'http';
+import { spawnSync } from 'child_process';
+import net from 'net';
+import os from 'os';
+import path from 'path';
+import WebSocket from 'ws';
+import { LRUCache } from 'lru-cache';
+import wisp from 'wisp-server-node';
 
-const surgeConfigPath = path.resolve("surge.config.json");
-const isSurgedRun = process.argv.includes("--surged");
+import { baremuxPath } from '@mercuryworkshop/bare-mux/node';
+import { epoxyPath } from '@mercuryworkshop/epoxy-transport';
+import { libcurlPath } from '@mercuryworkshop/libcurl-transport';
+import { uvPath } from '@titaniumnetwork-dev/ultraviolet';
+
+import './others/scaler.mjs';
+import './others/warmup.mjs';
+
+const surgeConfigPath = path.resolve('surge.config.json');
+const isSurgedRun = process.argv.includes('--surged');
+const PORT = parseInt(process.env.PORT || '3000', 10);
 let startTime = Date.now();
 
-function applySurgeAndRestartIfNeeded() {
-  if (isSurgedRun) {
-    try {
-      const config = JSON.parse(fs.readFileSync(surgeConfigPath, "utf-8"));
-      process.env.UV_THREADPOOL_SIZE = String(config.uvThreadpoolSize);
-    } catch {}
-    return;
-  }
-
-  const result = spawnSync("node", ["./others/surge.mjs"], { stdio: "inherit" });
-  if (result.error) process.exit(1);
-
-  const config = JSON.parse(fs.readFileSync(surgeConfigPath, "utf-8"));
-  const nodeArgs = config.nodeFlags.concat([path.resolve("index.mjs"), "--surged"]);
-  const env = {
-    ...process.env,
-    UV_THREADPOOL_SIZE: String(config.uvThreadpoolSize),
-    ALREADY_SURGED: "true"
-  };
-
-  const relaunch = spawnSync(process.execPath, nodeArgs, { stdio: "inherit", env });
-  process.exit(relaunch.status || 0);
-}
-
 applySurgeAndRestartIfNeeded();
 
 if (global.gc) {
@@ -52,111 +33,111 @@ if (global.gc) {
   }, 60000);
 }
 
-import "./others/scaler.mjs";
-import "./others/warmup.mjs";
+function logInfo(msg) { console.info(`[~] ${msg}`); }
+function logSuccess(msg) { console.info(`[+] ${msg}`); }
+function logError(err) { console.error(`[!] ${err instanceof Error ? err.message : err}`); }
 
-const cache = new LRUCache({
-  maxSize: 1000,
-  ttl: 345600000,
-  allowStale: false,
-  sizeCalculation: (value, key) => Buffer.byteLength(value) + Buffer.byteLength(key)
-});
+process.on('uncaughtException', err => logError(`Unhandled Exception: ${err}`));
+process.on('unhandledRejection', reason => logError(`Unhandled Rejection: ${reason}`));
 
-const port = parseInt(process.env.PORT || "3000", 10);
+function applySurgeAndRestartIfNeeded() {
+  if (isSurgedRun) {
+    try {
+      const config = JSON.parse(fs.readFileSync(surgeConfigPath, 'utf-8'));
+      process.env.UV_THREADPOOL_SIZE = String(config.uvThreadpoolSize);
+    } catch {}
+    return;
+  }
 
-function logInfo(msg) {
-  console.info(`[~] ${msg}`);
+  const prep = spawnSync(process.execPath, ['./others/surge.mjs'], { stdio: 'inherit' });
+  if (prep.error) process.exit(1);
+
+  const { nodeFlags, uvThreadpoolSize } = JSON.parse(fs.readFileSync(surgeConfigPath, 'utf-8'));
+  const args = [...nodeFlags, path.resolve('index.mjs'), '--surged'];
+  const env = { ...process.env, UV_THREADPOOL_SIZE: String(uvThreadpoolSize), ALREADY_SURGED: 'true' };
+
+  const relaunch = spawnSync(process.execPath, args, { stdio: 'inherit', env });
+  process.exit(relaunch.status || 0);
 }
 
-function logSuccess(msg) {
-  console.info(`[+] ${msg}`);
-}
-
-function logError(err) {
-  console.error(`[!] ${err instanceof Error ? err.message : err}`);
-}
-
-process.on("uncaughtException", err => logError(`Unhandled Exception: ${err}`));
-process.on("unhandledRejection", reason => logError(`Unhandled Rejection: ${reason}`));
-
 if (cluster.isPrimary) {
-  const cpus = os.cpus().length;
-  const workers = Math.max(1, cpus - 1);
+  const cpuCount = Math.max(1, os.cpus().length - 1);
+  logInfo(`Master: forking ${cpuCount} workers`);
 
-  logInfo(`Master: forking ${workers} workers`);
-
-  for (let i = 0; i < workers; i++) {
+  for (let i = 0; i < cpuCount; i++) {
     cluster.fork();
   }
 
-  cluster.on("exit", worker => {
+  cluster.on('exit', worker => {
     logError(`Worker ${worker.process.pid} exited. Restarting...`);
     cluster.fork();
  });
 
-  let current = 0;
-  const server = net.createServer({ pauseOnConnect: true }, conn => {
-    const workersArr = Object.values(cluster.workers);
-    if (!workersArr.length) return conn.destroy();
-    const worker = workersArr[current++ % workersArr.length];
-    worker.send("sticky-session:connection", conn);
+  const balancer = net.createServer({ pauseOnConnect: true }, conn => {
+    const workers = Object.values(cluster.workers);
+    if (workers.length === 0) return conn.destroy();
+    const worker = workers[(balancer.current = (balancer.current + 1 || 0)) % workers.length];
+    worker.send('sticky-session:connection', conn);
   });
 
-  server.on("error", err => logError(`Server error: ${err}`));
-  server.listen(port, () => logSuccess(`Server listening on ${port}`));
+  balancer.on('error', err => logError(`Balancer error: ${err}`));
+  balancer.listen(PORT, () => logSuccess(`Balancer listening on ${PORT}`));
 } else {
-  const __dirname = process.cwd();
-  const publicPath = path.join(__dirname, "public");
   const app = express();
-  let latencySamples = [];
+  const publicPath = path.join(process.cwd(), 'public');
+  const cache = new LRUCache({
+    maxSize: 1000,
+    ttl: 345600000,
+    sizeCalculation: (value, key) => Buffer.byteLength(value) + Buffer.byteLength(key)
+  });
+  const latencySamples = [];
 
   app.use(compression({ level: 4, memLevel: 4, threshold: 1024 }));
 
   app.use((req, res, next) => {
     const key = req.originalUrl;
-    const val = cache.get(key);
-    if (val) {
-      res.setHeader("X-Cache", "HIT");
-      return res.send(val);
+    const cached = cache.get(key);
+    if (cached) {
+      res.setHeader('X-Cache', 'HIT');
+      return res.send(cached);
     }
+
     res.sendResponse = res.send.bind(res);
     res.send = body => {
       cache.set(key, body);
-      res.setHeader("X-Cache", "MISS");
+      res.setHeader('X-Cache', 'MISS');
       res.sendResponse(body);
     };
     next();
   });
 
-  const staticOpts = { maxAge: "7d", immutable: true };
-  app.use("/baremux/", express.static(baremuxPath, staticOpts));
-  app.use("/epoxy/", express.static(epoxyPath, staticOpts));
-  app.use("/libcurl/", express.static(libcurlPath, staticOpts));
+  const staticOpts = { maxAge: '7d', immutable: true };
+  app.use('/baremux', express.static(baremuxPath, staticOpts));
+  app.use('/epoxy', express.static(epoxyPath, staticOpts));
+  app.use('/libcurl', express.static(libcurlPath, staticOpts));
+  app.use('/wah', express.static(uvPath, staticOpts));
   app.use(express.static(publicPath, staticOpts));
-  app.use("/wah/", express.static(uvPath, staticOpts));
   app.use(express.json());
 
   const sendHtml = file => (_req, res) => res.sendFile(path.join(publicPath, file));
-
   app.get('/', sendHtml('$.html'));
   app.get('/g', sendHtml('!.html'));
   app.get('/a', sendHtml('!!.html'));
-  app.get('/resent', (_req, res) => res.sendFile(path.join(publicPath, 'resent', 'index.html')));
+  app.get('/resent', (_req, res) => res.sendFile(path.join(publicPath, 'resent/index.html')));
 
   app.get('/api/info', (_req, res) => {
     try {
-      const average = latencySamples.length > 0
-        ? latencySamples.reduce((a, b) => a + b, 0) / latencySamples.length
+      const avg = latencySamples.length
+        ? latencySamples.reduce((sum, v) => sum + v, 0) / latencySamples.length
         : 0;
       let speed = 'Medium';
-      if (average < 200) speed = 'Fast';
-      else if (average > 500) speed = 'Slow';
-      const cpus = os.cpus();
-      const totalMem = os.totalmem() / 1024 / 1024 / 1024;
+      if (avg < 200) speed = 'Fast';
+      else if (avg > 500) speed = 'Slow';
+
       res.json({
         speed,
-        averageLatency: average.toFixed(2),
-        specs: `${cpus[0].model} + ${cpus.length} CPU Cores + ${totalMem.toFixed(1)}GB of RAM`,
+        averageLatency: avg.toFixed(2),
+        specs: `${os.cpus()[0].model} + ${os.cpus().length} cores + ${(os.totalmem() / 1e9).toFixed(1)}GB RAM`,
         startTime,
         samples: latencySamples.length,
         timestamp: Date.now()
@@ -168,34 +149,40 @@ if (cluster.isPrimary) {
 
   app.get('/api/github-updates', async (_req, res) => {
     try {
-      const ghRes = await fetch('https://api.github.com/repos/xojw/waves/commits?per_page=5', {
-        headers: {
-          'User-Agent': 'waves-app',
-          'Accept': 'application/vnd.github.v3+json'
+      const ghRes = await fetch(
+        'https://api.github.com/repos/xojw/waves/commits?per_page=1',
+        {
+          headers: {
+            'User-Agent': 'waves-app',
+            'Accept': 'application/vnd.github.v3+json'
+          }
+        }
+      );
+      if (!ghRes.ok) {
+        return res.status(ghRes.status).json({ error: 'GitHub API error' });
+      }
+
+      const [latest] = await ghRes.json();
+      const then = new Date(latest.commit.author.date).getTime();
+      const diff = Date.now() - then;
+      const mins = Math.round(diff / 60000);
+      const hrs = Math.round(diff / 3600000);
+      const ago = hrs >= 1
+        ? `${hrs} hour${hrs > 1 ? 's' : ''} ago`
+        : `${mins} minute${mins !== 1 ? 's' : ''} ago`;
+
+      res.json({
+        repo: 'xojw/waves',
+        update: {
+          sha: latest.sha.slice(0, 7),
+          author: latest.commit.author.name,
+          message: latest.commit.message.split('\n')[0],
+          date: latest.commit.author.date,
+          ago
        }
      });
-      if (!ghRes.ok) return res.status(ghRes.status).json({ error: 'GitHub API error' });
-
-      const commits = await ghRes.json();
-      const now = Date.now();
-
-      const updates = commits.map(c => {
-        const dateMs = new Date(c.commit.author.date).getTime();
-        const diffMs = now - dateMs;
-        const diffMins = Math.round(diffMs / 60000);
-        const diffHours = Math.round(diffMs / 3600000);
-        const ago = diffHours > 1 ? `${diffHours} hours ago` : `${diffMins} minutes ago`;
-        return {
-          sha: c.sha.slice(0, 7),
-          message: c.commit.message.split('\n')[0],
-          author: c.commit.author.name,
-          date: c.commit.author.date,
-          ago
-        };
-      });
-
-      res.json({ repo: 'xojw/waves', updates });
-    } catch {
+    } catch (err) {
+      logError(err);
       res.status(500).json({ error: 'Internal server error' });
     }
   });
@@ -206,55 +193,56 @@ if (cluster.isPrimary) {
   server.keepAliveTimeout = 0;
   server.headersTimeout = 0;
 
-  const pingWSS = new WebSocket.Server({ noServer: true, maxPayload: 4194304, perMessageDeflate: false });
+  const pingWSS = new WebSocket.Server({
+    noServer: true,
+    maxPayload: 4194304,
+    perMessageDeflate: false
+  });
 
-  pingWSS.on("connection", (ws, req) => {
+  pingWSS.on('connection', (ws, req) => {
     const remote = req.socket.remoteAddress || 'unknown';
     let lat = [];
-    const interval = setInterval(() => {
-      if (ws.readyState === WebSocket.OPEN) ws.send(JSON.stringify({ type: 'ping', timestamp: Date.now() }));
+
+    const ticker = setInterval(() => {
+      if (ws.readyState === WebSocket.OPEN) {
+        ws.send(JSON.stringify({ type: 'ping', timestamp: Date.now() }));
+      }
     }, 1000);
 
     ws.on('message', msg => {
       try {
-        const data = JSON.parse(msg);
-        if (data.type === 'pong' && data.timestamp) {
-          const d = Date.now() - data.timestamp;
+        const { type, timestamp } = JSON.parse(msg);
+        if (type === 'pong' && timestamp) {
+          const d = Date.now() - timestamp;
           lat.push(d);
-          if (lat.length > 5) lat.shift();
           latencySamples.push(d);
-          if (latencySamples.length > 100) latencySamples.shift();
-          ws.send(JSON.stringify({ type: 'latency', latency: d }));
+          if (lat.length > 5) lat.shift();
        }
-      } catch(e) {
-        logError(`Ping error: ${e}`);
-      }
+      } catch {}
     });
 
     ws.on('close', () => {
-      clearInterval(interval);
-      const avg = lat.length ? (lat.reduce((a,b)=>a+b)/lat.length).toFixed(2) : 0;
-      logInfo(`WS ${remote} closed. Avg: ${avg}ms`);
+      clearInterval(ticker);
    });
  });
 
-  server.on('upgrade', (req, sock, head) => {
-    if (req.url === '/w/ping') {
-      pingWSS.handleUpgrade(req, sock, head, ws => pingWSS.emit('connection', ws, req));
-    } else if (req.url.startsWith('/w/')) {
-      wisp.routeRequest(req, sock, head);
+  server.on('upgrade', (req, socket, head) => {
+    if (req.url === '/ws/ping') {
+      pingWSS.handleUpgrade(req, socket, head, ws => {
+        pingWSS.emit('connection', ws, req);
+      });
     } else {
-      sock.end();
+      socket.destroy();
    }
  });
 
-  server.on('error', err => logError(`Worker error: ${err}`));
-  server.listen(0, () => logSuccess(`Worker ${process.pid} ready`));
+  server.listen(0, () => {
+    logSuccess(`Worker ${process.pid} ready`);
+  });
 
  process.on('message', (msg, conn) => {
-    if (msg === 'sticky-session:connection' && conn) {
-      server.emit('connection', conn);
-      conn.resume();
-    }
+    if (msg !== 'sticky-session:connection') return;
+    server.emit('connection', conn);
+    conn.resume();
  });
 }
\ No newline at end of file