Skip to content
88 changes: 88 additions & 0 deletions benchmark/http/heap-profiler-labels.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
'use strict';

// Benchmark: HTTP server throughput impact of heap profiler with labels.
//
// Measures requests/sec across three modes:
// - none: no profiler (baseline)
// - sampling: profiler active, no labels
// - sampling-with-labels: profiler active with labels via withHeapProfileLabels
//
// Workload per request: ~100KB V8 heap (JSON parse/stringify) + ~50KB Buffer
// to exercise both HeapProfileLabelsCallback and ProfilingArrayBufferAllocator.
//
// Run with compare.js:
// node benchmark/compare.js --old ./out/Release/node --new ./out/Release/node \
// --runs 10 --filter heap-profiler-labels --set c=50 -- http

const common = require('../common.js');
const { PORT } = require('../_http-benchmarkers.js');
const v8 = require('v8');

const bench = common.createBenchmark(main, {
  mode: ['none', 'sampling', 'sampling-with-labels'],
  c: [50],
  duration: 10,
});

// Pre-build a ~100KB JSON payload template shaped like a typical API
// response: 200 user records plus a total count. Built once at module load
// so per-request work measures parse/stringify, not template construction.
const items = Array.from({ length: 200 }, (_, i) => ({
  id: i,
  name: `user-${i}`,
  email: `user${i}@example.com`,
  role: 'admin',
  metadata: { created: '2024-01-01', tags: ['a', 'b', 'c'] },
}));
const payloadTemplate = JSON.stringify({ data: items, total: 200 });

// Start an HTTP server that performs the mixed heap workload per request,
// optionally under the sampling heap profiler (with or without labels),
// then drive it with bench.http and report requests/sec.
function main({ mode, c, duration }) {
  const http = require('http');

  const interval = 512 * 1024; // 512KB — V8 default, production-realistic.

  if (mode !== 'none') {
    v8.startSamplingHeapProfiler(interval);
  }

  const server = http.createServer((req, res) => {
    const handler = () => {
      // Realistic mixed workload:
      // 1. ~100KB V8 heap: JSON parse + stringify (simulates API response building)
      const parsed = JSON.parse(payloadTemplate);
      parsed.requestId = Math.random();
      const body = JSON.stringify(parsed);

      // 2. ~50KB Buffer (simulates response buffering / crypto / compression)
      const buf = Buffer.alloc(50 * 1024, 0x42);

      // Reading buf in a header keeps the Buffer alive until the response
      // is written.
      res.writeHead(200, {
        'Content-Type': 'application/json',
        // Byte length, not UTF-16 code-unit count (String.prototype.length);
        // matches heap-profiler-realistic.js. Identical for this ASCII body,
        // but correct if the payload ever gains non-ASCII content.
        'Content-Length': Buffer.byteLength(body),
        'X-Buf-Check': buf[0],
      });
      res.end(body);
    };

    if (mode === 'sampling-with-labels') {
      v8.withHeapProfileLabels({ route: req.url }, handler);
    } else {
      handler();
    }
  });

  server.listen(PORT, () => {
    bench.http({
      path: '/api/bench',
      connections: c,
      duration,
    }, () => {
      if (mode !== 'none') {
        v8.stopSamplingHeapProfiler();
      }
      server.close();
    });
  });
}
150 changes: 150 additions & 0 deletions benchmark/http/heap-profiler-realistic.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,150 @@
'use strict';

// Benchmark: realistic app-server + DB-server heap profiler overhead.
//
// Architecture: wrk → [App Server :PORT] → [DB Server :PORT+1]
//
// The app server fetches JSON rows from the DB server, parses,
// sums two columns over all rows, and returns the result. This exercises:
// - http.get (async I/O + Buffer allocation for response body)
// - JSON.parse of realistic DB response (V8 heap allocation)
// - Two iteration passes over rows (intermediate values)
// - ALS label propagation across async I/O boundary
//
// Run with compare.js for statistical significance:
// node benchmark/compare.js --old ./out/Release/node --new ./out/Release/node \
// --runs 30 --filter heap-profiler-realistic --set rows=1000 -- http

const common = require('../common.js');
const { PORT } = require('../_http-benchmarkers.js');
const v8 = require('v8');
const http = require('http');

// The DB server listens one port above the benchmarked app server.
const DB_PORT = PORT + 1;

// rows: how many DB rows each request fetches and aggregates;
// mode: profiler state (baseline / sampling / sampling + labels).
const bench = common.createBenchmark(main, {
  mode: ['none', 'sampling', 'sampling-with-labels'],
  rows: [100, 1000],
  c: [50],
  duration: 10,
});

// --- DB Server: pre-built JSON responses keyed by row count ---

// Synthesize `n` order-like rows (random amount/quantity, padded user name,
// cycling category) and serialize them once, returning both the JSON body
// and its byte length for the Content-Length header.
function buildDBResponse(n) {
  const categories = ['electronics', 'clothing', 'food', 'books', 'tools'];
  const rows = Array.from({ length: n }, (_, i) => ({
    id: i,
    amount: Math.round(Math.random() * 10000) / 100,
    quantity: Math.floor(Math.random() * 500),
    name: `user-${String(i).padStart(6, '0')}`,
    email: `user${i}@example.com`,
    category: categories[i % categories.length],
  }));
  const body = JSON.stringify({ rows, total: n });
  return { body, len: Buffer.byteLength(body) };
}

// --- App Server helpers ---

// GET the canned row set from the DB server and resolve with the parsed
// JSON body. Rejects on request-level errors or malformed JSON.
function fetchFromDB(rows) {
  return new Promise((resolve, reject) => {
    const url = `http://127.0.0.1:${DB_PORT}/?rows=${rows}`;
    const request = http.get(url, (res) => {
      const parts = [];
      res.on('data', (part) => {
        parts.push(part);
      });
      res.on('end', () => {
        let payload;
        try {
          payload = JSON.parse(Buffer.concat(parts).toString());
        } catch (err) {
          reject(err);
          return;
        }
        resolve(payload);
      });
    });
    request.on('error', reject);
  });
}

// Aggregate two columns across every row. Deliberately two separate passes
// — the redundant iteration simulates light business logic and creates
// intermediate values for the profiler to observe.
function processRows(data) {
  const { rows } = data;
  let amountSum = 0;
  for (const row of rows) {
    amountSum += row.amount;
  }
  let quantitySum = 0;
  for (const row of rows) {
    quantitySum += row.quantity;
  }
  return {
    // Round to cents to avoid float noise in the response body.
    totalAmount: Math.round(amountSum * 100) / 100,
    totalQuantity: quantitySum,
    count: rows.length,
  };
}

// Boot the DB server, then the app server (optionally under the sampling
// heap profiler), and drive the app server with bench.http.
function main({ mode, rows, c, duration }) {
  // Pre-build both DB responses so serialization cost is not measured.
  const dbResponses = {};
  for (const n of [100, 1000]) {
    dbResponses[n] = buildDBResponse(n);
  }

  // DB server: serves a canned JSON row set selected by ?rows=N, falling
  // back to the 1000-row response for unknown N.
  const dbServer = http.createServer((req, res) => {
    const url = new URL(req.url, `http://127.0.0.1:${DB_PORT}`);
    const n = parseInt(url.searchParams.get('rows') || '1000', 10);
    const resp = dbResponses[n] || dbResponses[1000];
    res.writeHead(200, {
      'Content-Type': 'application/json',
      'Content-Length': resp.len,
    });
    res.end(resp.body);
  });

  dbServer.listen(DB_PORT, () => {
    const interval = 512 * 1024; // 512KB — V8 default, production-realistic.
    if (mode !== 'none') {
      v8.startSamplingHeapProfiler(interval);
    }

    // App server: fetch rows from the DB server, aggregate, respond.
    const appServer = http.createServer((req, res) => {
      const handler = async () => {
        const data = await fetchFromDB(rows);
        const result = processRows(data);
        const body = JSON.stringify(result);
        res.writeHead(200, {
          'Content-Type': 'application/json',
          'Content-Length': Buffer.byteLength(body),
        });
        res.end(body);
      };

      // Surface DB fetch failures as a 500 instead of leaving the request
      // hanging on an unhandled promise rejection.
      const onError = (err) => {
        if (!res.headersSent) {
          res.writeHead(500);
        }
        res.end(String(err));
      };

      if (mode === 'sampling-with-labels') {
        // Labels propagate across the async DB round-trip (ALS-based).
        v8.withHeapProfileLabels({ route: req.url }, () => {
          handler().catch(onError);
        });
      } else {
        handler().catch(onError);
      }
    });

    appServer.listen(PORT, () => {
      bench.http({
        path: '/api/data',
        connections: c,
        duration,
      }, () => {
        if (mode !== 'none') {
          v8.stopSamplingHeapProfiler();
        }
        appServer.close();
        dbServer.close();
      });
    });
  });
}
59 changes: 59 additions & 0 deletions benchmark/v8/heap-profiler-labels.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
'use strict';

// Benchmark: overhead of V8 sampling heap profiler with and without labels.
//
// Measures per-allocation cost across three modes:
// - none: no profiler running (baseline)
// - sampling: profiler active, no labels callback
// - sampling-with-labels: profiler active with labels via withHeapProfileLabels
//
// Run standalone:
// node benchmark/v8/heap-profiler-labels.js
//
// Run with compare.js for statistical analysis:
// node benchmark/compare.js --old ./node-baseline --new ./node-with-labels \
// --filter heap-profiler-labels

const common = require('../common.js');
const v8 = require('v8');

// n: total allocations per run; mode: profiler state for the run
// (baseline / sampling / sampling + labels).
const bench = common.createBenchmark(main, {
  mode: ['none', 'sampling', 'sampling-with-labels'],
  n: [1e6],
});

// Run the allocation workload under the selected profiler mode and report
// allocations/sec via bench.start()/bench.end() inside runWorkload.
function main({ mode, n }) {
  const interval = 512 * 1024; // 512KB — V8 default, production-realistic.

  // Both profiled modes start the sampler identically; they differ only in
  // whether the workload runs under withHeapProfileLabels. (Collapses the
  // previous duplicated if/else-if branches; matches the stop call below.)
  if (mode !== 'none') {
    v8.startSamplingHeapProfiler(interval);
  }

  if (mode === 'sampling-with-labels') {
    v8.withHeapProfileLabels({ route: '/bench' }, () => {
      runWorkload(n);
    });
  } else {
    runWorkload(n);
  }

  if (mode !== 'none') {
    v8.stopSamplingHeapProfiler();
  }
}

// Allocate n small objects with string properties — representative of a
// JSON-API workload; each object occupies roughly 100-200 bytes of V8 heap.
function runWorkload(n) {
  const live = [];
  bench.start();
  let i = 0;
  while (i < n) {
    live.push({ id: i, name: `item-${i}`, value: Math.random() });
    // Cap the live set at 1000 entries: enough churn to keep the GC busy
    // without letting memory grow unbounded.
    if (live.length > 1000) live.shift();
    i++;
  }
  bench.end(n);
}
Loading