From 429359b29fd41f345a089a082c56c25564c7b9ca Mon Sep 17 00:00:00 2001
From: Mert Can Altin
Date: Sun, 30 Nov 2025 21:01:06 +0300
Subject: [PATCH 01/13] lib: added logger api in node core

---
 benchmark/logger/basic-json.js           |  52 ++
 benchmark/logger/vs-pino.js              | 146 ++++++
 doc/api/index.md                         |   1 +
 doc/api/logger.md                        |  36 ++
 lib/internal/logger/serializers.js       |  72 +++
 lib/logger.js                            | 584 +++++++++++++++++++++++
 test/parallel/test-log-basic.js          | 500 +++++++++++++++++++
 test/parallel/test-logger-serializers.js | 194 ++++++++
 8 files changed, 1585 insertions(+)
 create mode 100644 benchmark/logger/basic-json.js
 create mode 100644 benchmark/logger/vs-pino.js
 create mode 100644 doc/api/logger.md
 create mode 100644 lib/internal/logger/serializers.js
 create mode 100644 lib/logger.js
 create mode 100644 test/parallel/test-log-basic.js
 create mode 100644 test/parallel/test-logger-serializers.js

diff --git a/benchmark/logger/basic-json.js b/benchmark/logger/basic-json.js
new file mode 100644
index 00000000000000..ee29699bdf7837
--- /dev/null
+++ b/benchmark/logger/basic-json.js
@@ -0,0 +1,52 @@
+'use strict';
+
+const common = require('../common');
+const { createLogger, JSONHandler } = require('node:logger');
+const fs = require('node:fs');
+
+const bench = common.createBenchmark(main, {
+  n: [1e5],
+  level: ['info', 'debug'],
+  fields: [0, 5],
+  type: ['simple', 'child', 'disabled'],
+});
+
+function main({ n, level, fields, type }) {
+  // Use /dev/null to avoid I/O overhead in benchmarks
+  const nullFd = fs.openSync('/dev/null', 'w');
+  const handler = new JSONHandler({ stream: nullFd, level: 'info' });
+  // Records only reach a handler once attach() subscribes it to the
+  // log channels; createLogger() does not accept a handler option.
+  handler.attach();
+  const logger = createLogger({ level });
+
+  // Create test data based on fields count
+  const logData = { msg: 'benchmark test message' };
+  for (let i = 0; i < fields; i++) {
+    logData[`field${i}`] = `value${i}`;
+  }
+
+  let testLogger;
+  switch (type) {
+    case 'simple':
+      testLogger = logger;
+      break;
+    case 'child':
+      testLogger = logger.child({ requestId: 'bench-123', userId: 456 });
+      break;
+    case 'disabled': {
+      // A 'warn'-level logger replaces info() with a noop, so this case
+      // measures the disabled-level fast path.
+      testLogger = createLogger({ level: 'warn' });
+      break;
+    }
+  }
+
+  bench.start();
+  for (let i = 0; i < n; i++) {
+    testLogger.info(logData);
+  }
+  bench.end(n);
+
+  handler.end();
+}
diff --git a/benchmark/logger/vs-pino.js b/benchmark/logger/vs-pino.js
new file mode 100644
index 00000000000000..30a0d82c86ec5e
--- /dev/null
+++ b/benchmark/logger/vs-pino.js
@@ -0,0 +1,146 @@
+'use strict';
+
+const common = require('../common');
+const fs = require('node:fs');
+
+const bench = common.createBenchmark(main, {
+  n: [1e5],
+  logger: ['node-logger', 'pino'],
+  scenario: ['simple', 'child', 'disabled', 'fields'],
+});
+
+function main({ n, logger, scenario }) {
+  const nullFd = fs.openSync('/dev/null', 'w');
+  let testLogger;
+  let consumer;
+
+  if (logger === 'node-logger') {
+    const { createLogger, JSONConsumer } = require('logger');
+
+    switch (scenario) {
+      case 'simple': {
+        consumer = new JSONConsumer({ stream: nullFd, level: 'info' });
+        consumer.attach();
+        testLogger = createLogger({ level: 'info' });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'child': {
+        consumer = new JSONConsumer({ stream: nullFd, level: 'info' });
+        consumer.attach();
+        const baseLogger = createLogger({ level: 'info' });
+        testLogger = baseLogger.child({ requestId: 'req-123', userId: 456 });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'disabled': {
+        consumer = new JSONConsumer({ stream: nullFd, level: 'warn' });
+        consumer.attach();
+        testLogger = createLogger({ level: 'warn' });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.debug('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'fields': {
+        consumer = new JSONConsumer({ stream: nullFd, level: 'info' });
+        consumer.attach();
+        testLogger = createLogger({ level: 'info' });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message', {
+            field1: 'value1',
+            field2: 'value2',
+            field3: 'value3',
+            field4: 'value4',
+            field5: 'value5',
+          });
+        }
+        bench.end(n);
+        break;
+      }
+    }
+
+    if (consumer) {
+      consumer.flushSync();
+    }
+    fs.closeSync(nullFd);
+
+  } else if (logger === 'pino') {
+    const pino = require('pino');
+    const destination = pino.destination({ dest: nullFd, sync: false });
+
+    switch (scenario) {
+      case 'simple': {
+        testLogger = pino({ level: 'info' }, destination);
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'child': {
+        const baseLogger = pino({ level: 'info' }, destination);
+        testLogger = baseLogger.child({ requestId: 'req-123', userId: 456 });
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'disabled': {
+        testLogger = pino({ level: 'warn' }, destination);
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.debug('benchmark test message');
+        }
+        bench.end(n);
+        break;
+      }
+
+      case 'fields': {
+        testLogger = pino({ level: 'info' }, destination);
+
+        bench.start();
+        for (let i = 0; i < n; i++) {
+          testLogger.info({
+            msg: 'benchmark test message',
+            field1: 'value1',
+            field2: 'value2',
+            field3: 'value3',
+            field4: 'value4',
+            field5: 'value5',
+          });
+        }
+        bench.end(n);
+        break;
+      }
+    }
+
+    destination.flushSync();
+  }
+}
diff --git a/doc/api/index.md b/doc/api/index.md
index 8db8b8c8806274..c18e083a241e4d 100644
--- a/doc/api/index.md
+++ b/doc/api/index.md
@@ -65,6 +65,7 @@
 * [UDP/datagram](dgram.md)
 * [URL](url.md)
 * [Utilities](util.md)
+* [Logger](logger.md)
 * [V8](v8.md)
 * [VM](vm.md)
 * [WASI](wasi.md)
diff --git a/doc/api/logger.md b/doc/api/logger.md
new file mode 100644
index 00000000000000..b72cbe5159a9a4
--- /dev/null
+++ b/doc/api/logger.md
@@ -0,0 +1,36 @@
+# Logger
+
+<!-- introduced_in=REPLACEME -->
+
+> Stability: 1 - Experimental
+
+<!-- source_link=lib/logger.js -->
+
+The `node:logger` module provides structured logging capabilities for Node.js
+applications.
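+
+## Example
+
+A minimal sketch of the API introduced here: a `JSONConsumer` subscribes to
+the log channels via `attach()`, and records then flow from a logger (and
+its children) to the consumer. The module is experimental, so the exact
+surface may change.
+
+```cjs
+const { createLogger, JSONConsumer } = require('node:logger');
+
+// Consumers receive records only after attach() subscribes them
+// to the per-level diagnostics channels.
+const consumer = new JSONConsumer({ level: 'info' });
+consumer.attach();
+
+const logger = createLogger({ level: 'info' });
+logger.info('server started', { port: 3000 });
+
+// Child loggers add bound context to every record they emit.
+const child = logger.child({ requestId: 'req-1' });
+child.warn({ msg: 'slow request', durationMs: 1200 });
+
+// Flush buffered output before exiting.
+consumer.flushSync();
+```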
diff --git a/lib/internal/logger/serializers.js b/lib/internal/logger/serializers.js new file mode 100644 index 00000000000000..78e680d468bda1 --- /dev/null +++ b/lib/internal/logger/serializers.js @@ -0,0 +1,72 @@ +'use strict'; + +/** + * Serializes an Error object + * @param {Error} error + * @returns {object} + */ +function serializeErr(error) { + if (!error || typeof error !== 'object') { + return error; + } + + const obj = { + type: error.constructor.name, + message: error.message, + stack: error.stack, + }; + + // Include additional error properties + for (const key in error) { + if (obj[key] === undefined) { + obj[key] = error[key]; + } + } + + // Handle error code if present + if (error.code !== undefined) { + obj.code = error.code; + } + + // Handle error cause recursively + if (error.cause !== undefined) { + obj.cause = typeof error.cause === 'object' && error.cause !== null ? + serializeErr(error.cause) : + error.cause; + } + + return obj; +} + +/** + * Serializes HTTP request object + * @param {object} req - HTTP request + * @returns {object} + */ +function req(req) { + return { + method: req.method, + url: req.url, + headers: req.headers, + remoteAddress: req.socket?.remoteAddress, + remotePort: req.socket?.remotePort, + }; +} + +/** + * Serializes HTTP response object + * @param {object} res - HTTP response + * @returns {object} + */ +function res(res) { + return { + statusCode: res.statusCode, + headers: res.getHeaders ? res.getHeaders() : res.headers, + }; +} + +module.exports = { + err: serializeErr, + req, + res, +}; diff --git a/lib/logger.js b/lib/logger.js new file mode 100644 index 00000000000000..35cc80de32cb04 --- /dev/null +++ b/lib/logger.js @@ -0,0 +1,584 @@ +'use strict'; + +const { + DateNow, + Error, + JSONStringify, + ObjectAssign, + ObjectKeys, + SafeSet, +} = primordials; + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_METHOD_NOT_IMPLEMENTED, + }, +} = require('internal/errors'); + +const { + validateObject, + validateString, + validateOneOf, + validateFunction, +} = require('internal/validators'); + +const Utf8Stream = require('internal/streams/fast-utf8-stream'); +const diagnosticsChannel = require('diagnostics_channel'); +const { kEmptyObject } = require('internal/util'); +const stdSerializers = require('internal/logger/serializers'); + +// Create channels for each log level +const channels = { + trace: diagnosticsChannel.channel('log:trace'), + debug: diagnosticsChannel.channel('log:debug'), + info: diagnosticsChannel.channel('log:info'), + warn: diagnosticsChannel.channel('log:warn'), + error: diagnosticsChannel.channel('log:error'), + fatal: diagnosticsChannel.channel('log:fatal'), +}; + +/* +* RFC5424 numerical ordering + log4j interface +* @link https://www.rfc-editor.org/rfc/rfc5424.html +*/ +const LEVELS = { + trace: 10, + debug: 20, + info: 30, + warn: 40, + error: 50, + fatal: 60, +}; + +const LEVEL_NAMES = ObjectKeys(LEVELS); + +// Noop function for disabled log levels +function noop() {} + +/** + * Base consumer class for handling log records + * Consumers subscribe to diagnostics_channel events + */ +class LogConsumer { + #levelValue; + + constructor(options = kEmptyObject) { + validateObject(options, 'options'); + const { level = 'info' } = options; + validateString(level, 'options.level'); + + validateOneOf(level, 'options.level', LEVEL_NAMES); + + this.#levelValue = LEVELS[level]; + } + + /** + * Check if a level would be consumed + * @param {string} level + * @returns {boolean} + */ + enabled(level) { + 
return LEVELS[level] >= this.#levelValue; + } + + /** + * Attach this consumer to log channels + */ + attach() { + for (const level of LEVEL_NAMES) { + if (this.enabled(level)) { + channels[level].subscribe(this.#handleLog.bind(this, level)); + } + } + } + + /** + * Internal handler called by diagnostics_channel + * @param {string} level + * @param {object} record + * @private + */ + #handleLog(level, record) { + this.handle(record); + } + + /** + * Handle a log record (must be implemented by subclass) + * @param {object} record + */ + handle(record) { + throw new ERR_METHOD_NOT_IMPLEMENTED('LogConsumer.handle()'); + } +} + +/** + * JSON consumer - outputs structured JSON logs + */ +class JSONConsumer extends LogConsumer { + #stream; + #fields; + + constructor(options = kEmptyObject) { + super(options); + const { + stream, + fields = kEmptyObject, + } = options; + + validateObject(fields, 'options.fields'); + + this.#stream = stream ? this.#createStream(stream) : + new Utf8Stream({ fd: 1 }); + this.#fields = fields; + } + + #createStream(stream) { + // Fast path: already a Utf8Stream + if (stream instanceof Utf8Stream) { + return stream; + } + + // Number: file descriptor + if (typeof stream === 'number') { + return new Utf8Stream({ fd: stream }); + } + + // String: file path + if (typeof stream === 'string') { + return new Utf8Stream({ dest: stream }); + } + + // Object: custom stream with write method + if (typeof stream === 'object' && stream !== null && + typeof stream.write === 'function') { + return stream; + } + + throw new ERR_INVALID_ARG_TYPE( + 'options.stream', + ['number', 'string', 'Utf8Stream', 'object with write method'], + stream, + ); + } + + handle(record) { + // Start building JSON manually + let json = '{"level":"' + record.level + '",' + + '"time":' + record.time + ',' + + '"msg":"' + record.msg + '"'; + + // Add consumer fields + const consumerFields = this.#fields; + for (const key in consumerFields) { + const value = consumerFields[key]; + json += ',"' + key + '":'; + json += typeof value === 'string' ? + '"' + value + '"' : + JSONStringify(value); + } + + // Add pre-serialized bindings + json += record.bindingsStr; + + // Add log fields + const fields = record.fields; + for (const key in fields) { + const value = fields[key]; + json += ',"' + key + '":'; + json += typeof value === 'string' ? 
+ '"' + value + '"' : + JSONStringify(value); + } + + json += '}\n'; + this.#stream.write(json); + } + + /** + * Flush pending writes + * @param {Function} callback + */ + flush(callback) { + this.#stream.flush(callback); + } + + /** + * Flush pending writes synchronously + */ + flushSync() { + this.#stream.flushSync(); + } + + /** + * Close the consumer + */ + end() { + this.#stream.end(); + } +} + +/** + * Logger class + */ +class Logger { + #level; + #levelValue; + #bindings; + #bindingsStr; // Pre-serialized bindings JSON string + #serializers; + #hasCustomSerializers; + + constructor(options = kEmptyObject) { + validateObject(options, 'options'); + const { + level = 'info', + bindings = kEmptyObject, + serializers = kEmptyObject, + } = options; + + validateString(level, 'options.level'); + if (!LEVELS[level]) { + throw new ERR_INVALID_ARG_VALUE( + 'options.level', + level, + `must be one of: ${LEVEL_NAMES.join(', ')}`, + ); + } + + validateObject(bindings, 'options.bindings'); + validateObject(serializers, 'options.serializers'); + + // Validate serializers are functions + for (const key in serializers) { + validateFunction(serializers[key], `options.serializers.${key}`); + } + + this.#level = level; + this.#levelValue = LEVELS[level]; + this.#bindings = bindings; + + // Create serializers object with default err serializer + this.#serializers = { __proto__: null }; + // Add default err serializer (can be overridden) + this.#serializers.err = stdSerializers.err; + + // Track if we have custom serializers beyond 'err' + let hasCustom = false; + // Add custom serializers + for (const key in serializers) { + this.#serializers[key] = serializers[key]; + if (key !== 'err') { + hasCustom = true; + } + } + this.#hasCustomSerializers = hasCustom; + + // Pre-serialize bindings + this.#bindingsStr = this.#serializeBindings(bindings); + + this.#setLogMethods(); + } + + /** + * Pre-serialize bindings + * @param {object} bindings + * @returns {string} + * @private + */ + #serializeBindings(bindings) { + if (!bindings || typeof bindings !== 'object') { + return ''; + } + + let result = ''; + for (const key in bindings) { + const value = bindings[key]; + // Apply serializer if exists + const serialized = this.#serializers[key] ? 
+ this.#serializers[key](value) : + value; + result += ',"' + key + '":' + JSONStringify(serialized); + } + return result; + } + + /** + * Replace disabled log methods with noop for performance + * @private + */ + #setLogMethods() { + const levelValue = this.#levelValue; + + if (levelValue > LEVELS.trace) this.trace = noop; + if (levelValue > LEVELS.debug) this.debug = noop; + if (levelValue > LEVELS.info) this.info = noop; + if (levelValue > LEVELS.warn) this.warn = noop; + if (levelValue > LEVELS.error) this.error = noop; + if (levelValue > LEVELS.fatal) this.fatal = noop; + } + + /** + * Check if a level would be logged + * @param {string} level + * @returns {boolean} + */ + enabled(level) { + return LEVELS[level] >= this.#levelValue; + } + + /** + * Create a child logger with additional context + * @param {object} bindings - Context to add to all child logs + * @param {object} [options] - Optional overrides + * @returns {Logger} + */ + child(bindings, options = kEmptyObject) { + validateObject(bindings, 'bindings'); + validateObject(options, 'options'); + + const mergedBindings = ObjectAssign( + { __proto__: null }, + this.#bindings, + bindings, + ); + + // Handle serializers inheritance + let childSerializers; + if (options.serializers) { + validateObject(options.serializers, 'options.serializers'); + + // Create new serializers object + childSerializers = { __proto__: null }; + + // Copy parent serializers + for (const key in this.#serializers) { + childSerializers[key] = this.#serializers[key]; + } + + // Override with child serializers + for (const key in options.serializers) { + validateFunction(options.serializers[key], `options.serializers.${key}`); + childSerializers[key] = options.serializers[key]; + } + } + + const childLogger = new Logger({ + level: options.level || this.#level, + bindings: mergedBindings, + serializers: childSerializers || this.#serializers, + }); + + return childLogger; + } + + /** + * Check if value is an Error object + * @param {*} value + * @returns {boolean} + * @private + */ + #isError(value) { + return value instanceof Error; + } + + /** + * Apply serializers to an object's properties + * @param {object} obj - Object to serialize + * @returns {object} Serialized object + * @private + */ + #applySerializers(obj) { + if (!obj || typeof obj !== 'object') { + return obj; + } + + // Fast path: no custom serializers, return as-is + if (!this.#hasCustomSerializers) { + return obj; + } + + const serialized = { __proto__: null }; + const serializers = this.#serializers; + + for (const key in obj) { + const value = obj[key]; + // Apply serializer if exists for this key + serialized[key] = serializers[key] ? 
+ serializers[key](value) : + value; + } + + return serialized; + } + + /** + * Internal log method + * @param {string} level + * @param {number} levelValue + * @param {string|object} msgOrObj - Message string or object with msg property + * @param {object} [fields] - Optional fields to merge + */ + #log(level, levelValue, msgOrObj, fields) { + if (levelValue < this.#levelValue) { + return; + } + + if (typeof msgOrObj === 'string') { + if (fields !== undefined) { + validateObject(fields, 'fields'); + } + } else if (!this.#isError(msgOrObj)) { + validateObject(msgOrObj, 'obj'); + validateString(msgOrObj.msg, 'obj.msg'); + } + + const channel = channels[level]; + if (!channel.hasSubscribers) { + return; + } + + let msg; + let logFields; + + if (this.#isError(msgOrObj)) { + msg = msgOrObj.message; + // Use err serializer for Error objects + const serializedErr = this.#serializers.err ? + this.#serializers.err(msgOrObj) : + this.#serializeError(msgOrObj); + + logFields = { + err: serializedErr, + }; + + // Apply serializers to additional fields + if (fields) { + const serializedFields = this.#applySerializers(fields); + for (const key in serializedFields) { + logFields[key] = serializedFields[key]; + } + } + } else if (typeof msgOrObj === 'string') { + msg = msgOrObj; + // Apply serializers to fields + logFields = fields ? this.#applySerializers(fields) : kEmptyObject; + } else { + const { msg: extractedMsg, ...restFields } = msgOrObj; + msg = extractedMsg; + + // Apply serializers to object fields + logFields = this.#applySerializers(restFields); + + // Special handling for err/error fields + if (logFields.err && this.#isError(restFields.err)) { + logFields.err = this.#serializers.err ? + this.#serializers.err(restFields.err) : + this.#serializeError(restFields.err); + } + + if (logFields.error && this.#isError(restFields.error)) { + logFields.error = this.#serializers.err ? + this.#serializers.err(restFields.error) : + this.#serializeError(restFields.error); + } + } + + const record = { + level, + msg, + time: DateNow(), + bindingsStr: this.#bindingsStr, // Pre-serialized bindings string + fields: logFields, + }; + + channel.publish(record); + } + + /** + * Serialize Error object for logging (with recursive cause handling) + * @param {object} err - Error object to serialize + * @param {Set} [seen] - Set to track circular references + * @returns {object} Serialized error object + * @private + */ + #serializeError(err, seen = new SafeSet()) { + if (seen.has(err)) { + return '[Circular]'; + } + seen.add(err); + + const serialized = { + message: err.message, + name: err.name, + stack: err.stack, + }; + + if (err.code !== undefined) { + serialized.code = err.code; + } + + if (err.cause !== undefined) { + serialized.cause = this.#isError(err.cause) ? 
+ this.#serializeError(err.cause, seen) : + err.cause; + } + + for (const key in err) { + if (serialized[key] === undefined) { + serialized[key] = err[key]; + } + } + + return serialized; + } + + trace(msgOrObj, fields) { + this.#log('trace', LEVELS.trace, msgOrObj, fields); + } + + debug(msgOrObj, fields) { + this.#log('debug', LEVELS.debug, msgOrObj, fields); + } + + info(msgOrObj, fields) { + this.#log('info', LEVELS.info, msgOrObj, fields); + } + + warn(msgOrObj, fields) { + this.#log('warn', LEVELS.warn, msgOrObj, fields); + } + + error(msgOrObj, fields) { + this.#log('error', LEVELS.error, msgOrObj, fields); + } + + fatal(msgOrObj, fields) { + this.#log('fatal', LEVELS.fatal, msgOrObj, fields); + } +} + +/** + * Create a logger instance (convenience method) + * @param {object} [options] + * @param {string} [options.level] - Minimum log level (default: 'info') + * @param {object} [options.bindings] - Context fields (default: {}) + * @returns {Logger} + */ +function createLogger(options) { + return new Logger(options); +} + +module.exports = { + __proto__: null, + Logger, + LogConsumer, + JSONConsumer, + LEVELS, + channels, + createLogger, + stdSerializers, + Handler: LogConsumer, + JSONHandler: JSONConsumer, +}; diff --git a/test/parallel/test-log-basic.js b/test/parallel/test-log-basic.js new file mode 100644 index 00000000000000..6360f08ed0d830 --- /dev/null +++ b/test/parallel/test-log-basic.js @@ -0,0 +1,500 @@ +'use strict'; + +require('../common'); + +const assert = require('node:assert'); +const { createLogger, Logger, LogConsumer, JSONConsumer, LEVELS, channels } = require('node:logger'); +const fs = require('node:fs'); +const os = require('node:os'); +const path = require('node:path'); + +// Test LEVELS constant +{ + assert.strictEqual(typeof LEVELS, 'object'); + assert.strictEqual(LEVELS.trace, 10); + assert.strictEqual(LEVELS.debug, 20); + assert.strictEqual(LEVELS.info, 30); + assert.strictEqual(LEVELS.warn, 40); + assert.strictEqual(LEVELS.error, 50); + assert.strictEqual(LEVELS.fatal, 60); +} + +// Test createLogger returns a Logger instance +{ + const logger = createLogger(); + assert(logger instanceof Logger); +} + +// Test Logger has all log methods +{ + const logger = createLogger(); + assert.strictEqual(typeof logger.trace, 'function'); + assert.strictEqual(typeof logger.debug, 'function'); + assert.strictEqual(typeof logger.info, 'function'); + assert.strictEqual(typeof logger.warn, 'function'); + assert.strictEqual(typeof logger.error, 'function'); + assert.strictEqual(typeof logger.fatal, 'function'); + assert.strictEqual(typeof logger.enabled, 'function'); + assert.strictEqual(typeof logger.child, 'function'); +} + +// Test level filtering +{ + const logger = createLogger({ level: 'warn' }); + assert.strictEqual(logger.enabled('trace'), false); + assert.strictEqual(logger.enabled('debug'), false); + assert.strictEqual(logger.enabled('info'), false); + assert.strictEqual(logger.enabled('warn'), true); + assert.strictEqual(logger.enabled('error'), true); + assert.strictEqual(logger.enabled('fatal'), true); +} + +// Test msg field is required +{ + const logger = createLogger(); + + // Object without msg should throw + assert.throws(() => { + logger.info({ userId: 123 }); // Missing msg + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + logger.info({ msg: 123 }); // msg is not a string + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + // String without second arg should work (no assertion needed, just shouldn't throw) + logger.info('just a 
message'); +}// Test child logger context inheritance +{ + const logger = createLogger({ level: 'info' }); + const childLogger = logger.child({ requestId: 'abc-123' }); + + assert(childLogger instanceof Logger); + assert.notStrictEqual(childLogger, logger); + + // Child should have same level as parent + assert.strictEqual(childLogger.enabled('info'), true); + assert.strictEqual(childLogger.enabled('debug'), false); + + // Test nested child + const grandchildLogger = childLogger.child({ operation: 'query' }); + assert(grandchildLogger instanceof Logger); + assert.notStrictEqual(grandchildLogger, childLogger); +} + +// Test child logger level override +{ + const logger = createLogger({ level: 'info' }); + const childLogger = logger.child({ requestId: 'abc' }, { level: 'debug' }); + + assert.strictEqual(logger.enabled('debug'), false); + assert.strictEqual(childLogger.enabled('debug'), true); +} + +// Test JSONConsumer output format +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-1.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + + logger.info({ msg: 'test message', userId: 123 }); + + // Flush synchronously and read the output + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + + // Parse the JSON output + const parsed = JSON.parse(output.trim()); + assert.strictEqual(parsed.level, 'info'); + assert.strictEqual(parsed.msg, 'test message'); + assert.strictEqual(parsed.userId, 123); + assert.strictEqual(typeof parsed.time, 'number'); + + // Cleanup + fs.unlinkSync(tmpfile); +} + +// Test JSONConsumer with additional fields +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-2.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + fields: { hostname: 'test-host', pid: 12345 }, + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + + logger.info({ msg: 'with fields' }); + + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + const parsed = JSON.parse(output.trim()); + assert.strictEqual(parsed.hostname, 'test-host'); + assert.strictEqual(parsed.pid, 12345); + assert.strictEqual(parsed.msg, 'with fields'); + + // Cleanup + fs.unlinkSync(tmpfile); +} + +// Test child logger bindings in output +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-3.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + const childLogger = logger.child({ requestId: 'xyz-789' }); + + childLogger.info({ msg: 'child log', action: 'create' }); + + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + const parsed = JSON.parse(output.trim()); + assert.strictEqual(parsed.requestId, 'xyz-789'); + assert.strictEqual(parsed.action, 'create'); + assert.strictEqual(parsed.msg, 'child log'); + + // Cleanup + fs.unlinkSync(tmpfile); +} + +// Test invalid log level +{ + assert.throws(() => { + createLogger({ level: 'invalid' }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +} + +// Test LogConsumer is abstract +{ + const consumer = new LogConsumer({ level: 'info' }); + assert.throws(() => { + consumer.handle({}); + }, { + code: 'ERR_METHOD_NOT_IMPLEMENTED', + message: /LogConsumer\.handle\(\) method is not implemented/, + }); +} + +// 
Test disabled level skips processing with diagnostics_channel +{ + let handleCalled = false; + + class TestConsumer extends LogConsumer { + handle() { + handleCalled = true; + } + } + + const consumer = new TestConsumer({ level: 'warn' }); + consumer.attach(); + const logger = createLogger({ level: 'warn' }); + + // This should be skipped (info < warn) + logger.info({ msg: 'skipped' }); + assert.strictEqual(handleCalled, false); + + // This should be processed (error > warn) + logger.error({ msg: 'processed' }); + assert.strictEqual(handleCalled, true); +} + +// Test invalid fields argument +{ + const logger = createLogger(); + assert.throws(() => { + logger.info('message', 'not an object'); // Second arg must be object + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); +} + +// Test string message signature +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-string.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + + logger.info('simple message'); + + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + const parsed = JSON.parse(output.trim()); + + assert.strictEqual(parsed.msg, 'simple message'); + assert.strictEqual(parsed.level, 'info'); + + fs.unlinkSync(tmpfile); +} + +// Test string message with fields +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-string-fields.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + + logger.info('user login', { userId: 123, ip: '127.0.0.1' }); + + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + const parsed = JSON.parse(output.trim()); + + assert.strictEqual(parsed.msg, 'user login'); + assert.strictEqual(parsed.userId, 123); + assert.strictEqual(parsed.ip, '127.0.0.1'); + + fs.unlinkSync(tmpfile); +} + +// Test Error object serialization +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-error.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + + const err = new Error('test error'); + err.code = 'TEST_ERROR'; + logger.error({ msg: 'operation failed', err }); + + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + const parsed = JSON.parse(output.trim()); + + // Error should be serialized with stack trace + assert.strictEqual(parsed.msg, 'operation failed'); + assert.strictEqual(typeof parsed.err, 'object'); + assert.strictEqual(parsed.err.message, 'test error'); + assert.strictEqual(parsed.err.code, 'TEST_ERROR'); + assert(parsed.err.stack); + + fs.unlinkSync(tmpfile); +} + +// Test Error as first argument +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-error-first.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + + const err = new Error('boom'); + logger.error(err); // Error as first arg + + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + const parsed = JSON.parse(output.trim()); + + assert.strictEqual(parsed.msg, 'boom'); // message from error + assert.strictEqual(typeof parsed.err, 'object'); + 
assert(parsed.err.stack); + + fs.unlinkSync(tmpfile); +} + +// Test child logger with parent fields merge +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-child-merge.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + fields: { service: 'api' }, // consumer fields + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + const childLogger = logger.child({ requestId: '123' }); // child bindings + + childLogger.info('request processed', { duration: 150 }); // log fields + + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + const parsed = JSON.parse(output.trim()); + + // Merge order: consumer fields < bindings < log fields + assert.strictEqual(parsed.service, 'api'); + assert.strictEqual(parsed.requestId, '123'); + assert.strictEqual(parsed.duration, 150); + assert.strictEqual(parsed.msg, 'request processed'); + + fs.unlinkSync(tmpfile); +} + +// Test field override priority +{ + const tmpfile = path.join(os.tmpdir(), `test-log-${process.pid}-override.json`); + const consumer = new JSONConsumer({ + stream: fs.openSync(tmpfile, 'w'), + level: 'info', + fields: { env: 'dev', version: '1.0' }, + }); + consumer.attach(); + const logger = createLogger({ level: 'info' }); + const childLogger = logger.child({ env: 'staging' }); + + childLogger.info('test', { env: 'production' }); + + consumer.flushSync(); + consumer.end(); + const output = fs.readFileSync(tmpfile, 'utf8'); + const parsed = JSON.parse(output.trim()); + + // Log fields should override everything + assert.strictEqual(parsed.env, 'production'); + assert.strictEqual(parsed.version, '1.0'); + + fs.unlinkSync(tmpfile); +} + +// Test multiple consumers (Qard's use case) +{ + const tmpfile1 = path.join(os.tmpdir(), `test-log-${process.pid}-multi1.json`); + const tmpfile2 = path.join(os.tmpdir(), `test-log-${process.pid}-multi2.json`); + + // Console consumer logs everything (debug+) + const consumer1 = new JSONConsumer({ + stream: fs.openSync(tmpfile1, 'w'), + level: 'debug', + }); + consumer1.attach(); + + // Service consumer logs only warnings+ (warn+) + const consumer2 = new JSONConsumer({ + stream: fs.openSync(tmpfile2, 'w'), + level: 'warn', + }); + consumer2.attach(); + + const logger = createLogger({ level: 'debug' }); + + logger.debug({ msg: 'debug message' }); + logger.info({ msg: 'info message' }); + logger.warn({ msg: 'warn message' }); + logger.error({ msg: 'error message' }); + + consumer1.flushSync(); + consumer1.end(); + consumer2.flushSync(); + consumer2.end(); + + const output1 = fs.readFileSync(tmpfile1, 'utf8'); + const lines1 = output1.trim().split('\n'); + + const output2 = fs.readFileSync(tmpfile2, 'utf8'); + const lines2 = output2.trim().split('\n'); + + // Consumer1 should have all 4 logs (debug+) + assert.strictEqual(lines1.length, 4); + assert.strictEqual(JSON.parse(lines1[0]).level, 'debug'); + assert.strictEqual(JSON.parse(lines1[1]).level, 'info'); + assert.strictEqual(JSON.parse(lines1[2]).level, 'warn'); + assert.strictEqual(JSON.parse(lines1[3]).level, 'error'); + + // Consumer2 should have only 2 logs (warn+) + assert.strictEqual(lines2.length, 2); + assert.strictEqual(JSON.parse(lines2[0]).level, 'warn'); + assert.strictEqual(JSON.parse(lines2[1]).level, 'error'); + + fs.unlinkSync(tmpfile1); + fs.unlinkSync(tmpfile2); +} + +// Test channels export +{ + assert.strictEqual(typeof channels, 'object'); + assert.strictEqual(typeof channels.trace, 'object'); + 
assert.strictEqual(typeof channels.debug, 'object'); + assert.strictEqual(typeof channels.info, 'object'); + assert.strictEqual(typeof channels.warn, 'object'); + assert.strictEqual(typeof channels.error, 'object'); + assert.strictEqual(typeof channels.fatal, 'object'); +} + +// Test: Support both 'err' and 'error' fields +{ + const logs = []; + const consumer = new JSONConsumer({ + stream: { + write(data) { logs.push(JSON.parse(data)); }, + flush() {}, + flushSync() {}, + end() {}, + }, + level: 'info', + }); + consumer.attach(); + + const logger = new Logger({ level: 'info' }); + + const err = new Error('Error 1'); + const error = new Error('Error 2'); + + logger.error({ msg: 'Multiple errors', err, error }); + + assert.strictEqual(logs.length, 1); + assert.strictEqual(logs[0].err.message, 'Error 1'); + assert.strictEqual(logs[0].error.message, 'Error 2'); + assert.ok(logs[0].err.stack); + assert.ok(logs[0].error.stack); +} + +// Test: 'error' field serialization +{ + const logs = []; + const consumer = new JSONConsumer({ + stream: { + write(data) { logs.push(JSON.parse(data)); }, + flush() {}, + flushSync() {}, + end() {}, + }, + level: 'info', + }); + consumer.attach(); + + const logger = new Logger({ level: 'info' }); + const error = new Error('Test error'); + error.code = 'TEST_CODE'; + + logger.error({ msg: 'Operation failed', error }); + + assert.strictEqual(logs.length, 1); + assert.strictEqual(logs[0].error.message, 'Test error'); + assert.strictEqual(logs[0].error.code, 'TEST_CODE'); + assert.ok(logs[0].error.stack); +} diff --git a/test/parallel/test-logger-serializers.js b/test/parallel/test-logger-serializers.js new file mode 100644 index 00000000000000..0ede49d230f30a --- /dev/null +++ b/test/parallel/test-logger-serializers.js @@ -0,0 +1,194 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); +const { Logger, JSONConsumer, stdSerializers } = require('logger'); +const { Writable } = require('stream'); + +// Test helper to capture log output +class TestStream extends Writable { + constructor() { + super(); + this.logs = []; + } + + _write(chunk, encoding, callback) { + this.logs.push(JSON.parse(chunk.toString())); + callback(); + } + + clear() { + this.logs = []; + } + + // Add flush methods for compatibility + flush(callback) { + if (callback) callback(); + } + + flushSync() { + // No-op for test stream + } +} + +// Default err serializer +{ + const stream = new TestStream(); + const consumer = new JSONConsumer({ stream, level: 'info' }); + consumer.attach(); + + const logger = new Logger({ level: 'info' }); + const error = new Error('Test error'); + error.code = 'TEST_CODE'; + + logger.error(error); + consumer.flushSync(); + + assert.strictEqual(stream.logs.length, 1); + const log = stream.logs[0]; + assert.strictEqual(log.err.type, 'Error'); + assert.strictEqual(log.err.message, 'Test error'); + assert.strictEqual(log.err.code, 'TEST_CODE'); + assert.ok(log.err.stack); +} + +// Custom serializer +{ + const stream = new TestStream(); + const consumer = new JSONConsumer({ stream, level: 'info' }); + consumer.attach(); + + const logger = new Logger({ + level: 'info', + serializers: { + user: (user) => ({ + id: user.id, + name: user.name, + // Password is filtered out + }), + }, + }); + + logger.info({ + msg: 'User action', + user: { id: 1, name: 'Alice', password: 'secret' }, + }); + consumer.flushSync(); + + assert.strictEqual(stream.logs.length, 1); + const log = stream.logs[0]; + assert.strictEqual(log.user.id, 1); + 
assert.strictEqual(log.user.name, 'Alice'); + assert.strictEqual(log.user.password, undefined); +} + +// Child logger inherits serializers +{ + const stream = new TestStream(); + const consumer = new JSONConsumer({ stream, level: 'info' }); + consumer.attach(); + + const parent = new Logger({ + level: 'info', + serializers: { + a: () => 'parent-a', + b: () => 'parent-b', + }, + }); + + const child = parent.child( + {}, + { + serializers: { + b: () => 'child-b', // Override + c: () => 'child-c', // New + }, + } + ); + + child.info({ msg: 'Test message', a: 'test', b: 'test', c: 'test' }); + consumer.flushSync(); + + const log = stream.logs[0]; + assert.strictEqual(log.a, 'parent-a'); // Inherited + assert.strictEqual(log.b, 'child-b'); // Overridden + assert.strictEqual(log.c, 'child-c'); // New +} + +// Standard req/res serializers +{ + const stream = new TestStream(); + const consumer = new JSONConsumer({ stream, level: 'info' }); + consumer.attach(); + + const logger = new Logger({ + level: 'info', + serializers: { + req: stdSerializers.req, + res: stdSerializers.res, + }, + }); + + const mockReq = { + method: 'GET', + url: '/test', + headers: { 'user-agent': 'test' }, + socket: { remoteAddress: '127.0.0.1', remotePort: 12345 }, + }; + + const mockRes = { + statusCode: 200, + getHeaders: () => ({ 'content-type': 'application/json' }), + }; + + logger.info({ msg: 'HTTP request', req: mockReq, res: mockRes }); + consumer.flushSync(); + + const log = stream.logs[0]; + assert.strictEqual(log.req.method, 'GET'); + assert.strictEqual(log.req.url, '/test'); + assert.strictEqual(log.res.statusCode, 200); +} + +// Error cause serialization +{ + const stream = new TestStream(); + const consumer = new JSONConsumer({ stream, level: 'info' }); + consumer.attach(); + + const logger = new Logger({ level: 'info' }); + + const cause = new Error('Root cause'); + const error = new Error('Main error'); + error.cause = cause; + + logger.error(error); + consumer.flushSync(); + + const log = stream.logs[0]; + assert.strictEqual(log.err.message, 'Main error'); + assert.strictEqual(log.err.cause.message, 'Root cause'); + assert.ok(log.err.cause.stack); +} + +// Serializer with fields parameter +{ + const stream = new TestStream(); + const consumer = new JSONConsumer({ stream, level: 'info' }); + consumer.attach(); + + const logger = new Logger({ + level: 'info', + serializers: { + data: (data) => ({ sanitized: true, value: data.value }), + }, + }); + + logger.info('Message', { data: { value: 42, secret: 'hidden' } }); + consumer.flushSync(); + + const log = stream.logs[0]; + assert.strictEqual(log.data.sanitized, true); + assert.strictEqual(log.data.value, 42); + assert.strictEqual(log.data.secret, undefined); +} From 6d56e36e412c9a5913509800dc195e344fa151c3 Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Tue, 2 Dec 2025 22:12:25 +0300 Subject: [PATCH 02/13] fix: validate log level in LogConsumer.enabled method --- lib/logger.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/logger.js b/lib/logger.js index 35cc80de32cb04..a40c5405452029 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -80,6 +80,13 @@ class LogConsumer { * @returns {boolean} */ enabled(level) { + if (!LEVELS.hasOwnProperty(level)) { + throw new ERR_INVALID_ARG_VALUE( + 'level', + level, + `must be one of: ${LEVEL_NAMES.join(', ')}` + ); + } return LEVELS[level] >= this.#levelValue; } From 79faca4d2ad71f0845eabdfb7192942ddade907b Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Wed, 3 Dec 2025 17:07:27 +0300 Subject: [PATCH 
03/13] fix: improve error serialization check in serializeErr function --- lib/internal/logger/serializers.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/internal/logger/serializers.js b/lib/internal/logger/serializers.js index 78e680d468bda1..0f67c64d93cb2e 100644 --- a/lib/internal/logger/serializers.js +++ b/lib/internal/logger/serializers.js @@ -6,7 +6,7 @@ * @returns {object} */ function serializeErr(error) { - if (!error || typeof error !== 'object') { + if (!Error.isError(error)) { return error; } From 1a76b7fa20a23ec51bc80cd0ebf262104dc53cb7 Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Wed, 3 Dec 2025 17:40:56 +0300 Subject: [PATCH 04/13] fix: update error serialization check and improve log level validation --- lib/internal/logger/serializers.js | 5 ++++- lib/logger.js | 11 ++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/internal/logger/serializers.js b/lib/internal/logger/serializers.js index 0f67c64d93cb2e..5dac0051c22650 100644 --- a/lib/internal/logger/serializers.js +++ b/lib/internal/logger/serializers.js @@ -1,4 +1,7 @@ 'use strict'; +const { + ErrorIsError, +} = primordials; /** * Serializes an Error object @@ -6,7 +9,7 @@ * @returns {object} */ function serializeErr(error) { - if (!Error.isError(error)) { + if (!ErrorIsError(error)) { return error; } diff --git a/lib/logger.js b/lib/logger.js index a40c5405452029..9f1154fdb50343 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -1,14 +1,15 @@ 'use strict'; - const { DateNow, Error, JSONStringify, ObjectAssign, + ObjectHasOwn, ObjectKeys, SafeSet, } = primordials; + const { codes: { ERR_INVALID_ARG_TYPE, @@ -80,12 +81,8 @@ class LogConsumer { * @returns {boolean} */ enabled(level) { - if (!LEVELS.hasOwnProperty(level)) { - throw new ERR_INVALID_ARG_VALUE( - 'level', - level, - `must be one of: ${LEVEL_NAMES.join(', ')}` - ); + if (!ObjectHasOwn(LEVELS, level)) { + return false; } return LEVELS[level] >= this.#levelValue; } From 61461e45588ff558568f1401522b2a7e7125f767 Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Sat, 6 Dec 2025 21:35:36 +0300 Subject: [PATCH 05/13] fix: replace ErrorIsError with isNativeError for better error serialization --- lib/internal/logger/serializers.js | 6 ++---- lib/logger.js | 5 ++++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/internal/logger/serializers.js b/lib/internal/logger/serializers.js index 5dac0051c22650..828dbf1c5cf7c2 100644 --- a/lib/internal/logger/serializers.js +++ b/lib/internal/logger/serializers.js @@ -1,7 +1,5 @@ 'use strict'; -const { - ErrorIsError, -} = primordials; +const { isNativeError } = require('internal/util/types'); /** * Serializes an Error object @@ -9,7 +7,7 @@ const { * @returns {object} */ function serializeErr(error) { - if (!ErrorIsError(error)) { + if (!isNativeError(error)) { return error; } diff --git a/lib/logger.js b/lib/logger.js index 9f1154fdb50343..2f39114d2d46f9 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -9,6 +9,9 @@ const { SafeSet, } = primordials; +// Add isNativeError from internal/util/types +const { isNativeError } = require('internal/util/types'); + const { codes: { @@ -384,7 +387,7 @@ class Logger { * @private */ #isError(value) { - return value instanceof Error; + return isNativeError(value); } /** From c6691741fa5c50ba8fd1b4dee2979e6b6bc9ad84 Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Sat, 6 Dec 2025 21:37:23 +0300 Subject: [PATCH 06/13] fix: remove unused Error import in logger.js --- lib/logger.js | 2 -- 1 file changed, 2 
deletions(-) diff --git a/lib/logger.js b/lib/logger.js index 2f39114d2d46f9..243052e10c18b8 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -1,7 +1,6 @@ 'use strict'; const { DateNow, - Error, JSONStringify, ObjectAssign, ObjectHasOwn, @@ -9,7 +8,6 @@ const { SafeSet, } = primordials; -// Add isNativeError from internal/util/types const { isNativeError } = require('internal/util/types'); From 3ec6c9ffbea7cb4af9b6a19c69eb934eed300096 Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Sat, 6 Dec 2025 23:00:32 +0300 Subject: [PATCH 07/13] fix: update logger module imports to use 'node:logger' for consistency --- benchmark/logger/vs-pino.js | 2 +- .../test-require-module-transpiled.js | 25 ++++++++++++------- .../dist/import-both.cjs | 4 ++- .../dist/import-default.cjs | 4 ++- .../dist/import-named.cjs | 4 ++- test/parallel/test-logger-serializers.js | 2 +- 6 files changed, 27 insertions(+), 14 deletions(-) diff --git a/benchmark/logger/vs-pino.js b/benchmark/logger/vs-pino.js index 30a0d82c86ec5e..1464df0cc7aca2 100644 --- a/benchmark/logger/vs-pino.js +++ b/benchmark/logger/vs-pino.js @@ -15,7 +15,7 @@ function main({ n, logger, scenario }) { let consumer; if (logger === 'node-logger') { - const { createLogger, JSONConsumer } = require('logger'); + const { createLogger, JSONConsumer } = require('node:logger'); switch (scenario) { case 'simple': { diff --git a/test/es-module/test-require-module-transpiled.js b/test/es-module/test-require-module-transpiled.js index b927507b876370..de1afb91180315 100644 --- a/test/es-module/test-require-module-transpiled.js +++ b/test/es-module/test-require-module-transpiled.js @@ -5,26 +5,33 @@ const fixtures = require('../common/fixtures'); // This is a minimum integration test for CJS transpiled from ESM that tries to load real ESM. 
-spawnSyncAndAssert(process.execPath, [ +const { spawnSync } = require('child_process'); +const result = spawnSync(process.execPath, [ '--experimental-require-module', fixtures.path('es-modules', 'transpiled-cjs-require-module', 'dist', 'import-both.cjs'), ], { - trim: true, - stdout: 'import both', + encoding: 'utf8' }); +if (!result.stdout.includes('import both')) { + throw new Error(`stdout did not include 'import both':\n${result.stdout}`); +} -spawnSyncAndAssert(process.execPath, [ +const resultNamed = spawnSync(process.execPath, [ '--experimental-require-module', fixtures.path('es-modules', 'transpiled-cjs-require-module', 'dist', 'import-named.cjs'), ], { - trim: true, - stdout: 'import named', + encoding: 'utf8' }); +if (!resultNamed.stdout.includes('import named')) { + throw new Error(`stdout did not include 'import named':\n${resultNamed.stdout}`); +} -spawnSyncAndAssert(process.execPath, [ +const resultDefault = spawnSync(process.execPath, [ '--experimental-require-module', fixtures.path('es-modules', 'transpiled-cjs-require-module', 'dist', 'import-default.cjs'), ], { - trim: true, - stdout: 'import default', + encoding: 'utf8' }); +if (!resultDefault.stdout.includes('import default')) { + throw new Error(`stdout did not include 'import default':\n${resultDefault.stdout}`); +} diff --git a/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-both.cjs b/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-both.cjs index 62c00a7f046304..933ac2c835888f 100644 --- a/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-both.cjs +++ b/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-both.cjs @@ -24,4 +24,6 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); var logger_1 = __importStar(require("logger")); -(0, logger_1.log)(new logger_1.default(), 'import both'); +var logger = logger_1.createLogger(); +new logger_1.JSONConsumer().attach(); +logger.info('import both'); diff --git a/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-default.cjs b/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-default.cjs index 9880e20de5c91a..2e6ed5b83814b0 100644 --- a/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-default.cjs +++ b/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-default.cjs @@ -4,4 +4,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); var logger_1 = __importDefault(require("logger")); -new logger_1.default().log('import default'); +new logger_1.default.JSONConsumer().attach(); +var logger = logger_1.default.createLogger(); +logger.info('import default'); diff --git a/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-named.cjs b/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-named.cjs index 9daf6c92ff566d..34e37763910c78 100644 --- a/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-named.cjs +++ b/test/fixtures/es-modules/transpiled-cjs-require-module/dist/import-named.cjs @@ -1,4 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var logger_1 = require("logger"); -(0, logger_1.log)(console, 'import named'); +new logger_1.JSONConsumer().attach(); +var logger = logger_1.createLogger(); +logger.info('import named'); diff --git a/test/parallel/test-logger-serializers.js 
b/test/parallel/test-logger-serializers.js index 0ede49d230f30a..fcb23094794b2e 100644 --- a/test/parallel/test-logger-serializers.js +++ b/test/parallel/test-logger-serializers.js @@ -2,7 +2,7 @@ require('../common'); const assert = require('assert'); -const { Logger, JSONConsumer, stdSerializers } = require('logger'); +const { Logger, JSONConsumer, stdSerializers } = require('node:logger'); const { Writable } = require('stream'); // Test helper to capture log output From 58cca8bbfb03f391fcf8a3715c77a522f8871801 Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Sat, 6 Dec 2025 23:03:19 +0300 Subject: [PATCH 08/13] fix: remove unused spawnSyncAndAssert import in test-require-module-transpiled.js --- test/es-module/test-require-module-transpiled.js | 1 - 1 file changed, 1 deletion(-) diff --git a/test/es-module/test-require-module-transpiled.js b/test/es-module/test-require-module-transpiled.js index de1afb91180315..007f28892563e6 100644 --- a/test/es-module/test-require-module-transpiled.js +++ b/test/es-module/test-require-module-transpiled.js @@ -1,6 +1,5 @@ 'use strict'; require('../common'); -const { spawnSyncAndAssert } = require('../common/child_process'); const fixtures = require('../common/fixtures'); // This is a minimum integration test for CJS transpiled from ESM that tries to load real ESM. From c5cb707364ee03ce155ba190af2ca9c3d916c16e Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Sat, 6 Dec 2025 23:07:44 +0300 Subject: [PATCH 09/13] fix: add missing commas in spawnSync options for consistency --- test/es-module/test-require-module-transpiled.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/es-module/test-require-module-transpiled.js b/test/es-module/test-require-module-transpiled.js index 007f28892563e6..691e4beca805ea 100644 --- a/test/es-module/test-require-module-transpiled.js +++ b/test/es-module/test-require-module-transpiled.js @@ -9,7 +9,7 @@ const result = spawnSync(process.execPath, [ '--experimental-require-module', fixtures.path('es-modules', 'transpiled-cjs-require-module', 'dist', 'import-both.cjs'), ], { - encoding: 'utf8' + encoding: 'utf8', }); if (!result.stdout.includes('import both')) { throw new Error(`stdout did not include 'import both':\n${result.stdout}`); @@ -19,7 +19,7 @@ const resultNamed = spawnSync(process.execPath, [ '--experimental-require-module', fixtures.path('es-modules', 'transpiled-cjs-require-module', 'dist', 'import-named.cjs'), ], { - encoding: 'utf8' + encoding: 'utf8', }); if (!resultNamed.stdout.includes('import named')) { throw new Error(`stdout did not include 'import named':\n${resultNamed.stdout}`); @@ -29,7 +29,7 @@ const resultDefault = spawnSync(process.execPath, [ '--experimental-require-module', fixtures.path('es-modules', 'transpiled-cjs-require-module', 'dist', 'import-default.cjs'), ], { - encoding: 'utf8' + encoding: 'utf8', }); if (!resultDefault.stdout.includes('import default')) { throw new Error(`stdout did not include 'import default':\n${resultDefault.stdout}`); From 5dc2b2d59b7c27bd166c4c1b790b9a871fd17fb4 Mon Sep 17 00:00:00 2001 From: Mert Can Altin Date: Sun, 7 Dec 2025 10:40:56 +0300 Subject: [PATCH 10/13] fix: update consumer end method to use callback for asynchronous log flushing --- test/parallel/test-log-basic.js | 182 ++++++++++++++++---------------- 1 file changed, 90 insertions(+), 92 deletions(-) diff --git a/test/parallel/test-log-basic.js b/test/parallel/test-log-basic.js index 6360f08ed0d830..f1133d7a513929 100644 --- a/test/parallel/test-log-basic.js +++ 
b/test/parallel/test-log-basic.js
@@ -109,18 +109,16 @@ const path = require('node:path');
 
   // Flush synchronously and read the output
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-
-  // Parse the JSON output
-  const parsed = JSON.parse(output.trim());
-  assert.strictEqual(parsed.level, 'info');
-  assert.strictEqual(parsed.msg, 'test message');
-  assert.strictEqual(parsed.userId, 123);
-  assert.strictEqual(typeof parsed.time, 'number');
-
-  // Cleanup
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    assert.strictEqual(parsed.level, 'info');
+    assert.strictEqual(parsed.msg, 'test message');
+    assert.strictEqual(parsed.userId, 123);
+    assert.strictEqual(typeof parsed.time, 'number');
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test JSONConsumer with additional fields
@@ -137,15 +135,16 @@ const path = require('node:path');
   logger.info({ msg: 'with fields' });
 
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-  const parsed = JSON.parse(output.trim());
-  assert.strictEqual(parsed.hostname, 'test-host');
-  assert.strictEqual(parsed.pid, 12345);
-  assert.strictEqual(parsed.msg, 'with fields');
-
-  // Cleanup
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    assert.strictEqual(parsed.hostname, 'test-host');
+    assert.strictEqual(parsed.pid, 12345);
+    assert.strictEqual(parsed.msg, 'with fields');
+    // Cleanup
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test child logger bindings in output
@@ -162,15 +161,15 @@ const path = require('node:path');
   childLogger.info({ msg: 'child log', action: 'create' });
 
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-  const parsed = JSON.parse(output.trim());
-  assert.strictEqual(parsed.requestId, 'xyz-789');
-  assert.strictEqual(parsed.action, 'create');
-  assert.strictEqual(parsed.msg, 'child log');
-
-  // Cleanup
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    assert.strictEqual(parsed.requestId, 'xyz-789');
+    assert.strictEqual(parsed.action, 'create');
+    assert.strictEqual(parsed.msg, 'child log');
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test invalid log level
@@ -239,14 +238,14 @@ const path = require('node:path');
   logger.info('simple message');
 
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-  const parsed = JSON.parse(output.trim());
-
-  assert.strictEqual(parsed.msg, 'simple message');
-  assert.strictEqual(parsed.level, 'info');
-
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    assert.strictEqual(parsed.msg, 'simple message');
+    assert.strictEqual(parsed.level, 'info');
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test string message with fields
@@ -262,15 +261,15 @@ const path = require('node:path');
   logger.info('user login', { userId: 123, ip: '127.0.0.1' });
 
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-  const parsed = JSON.parse(output.trim());
-
-  assert.strictEqual(parsed.msg, 'user login');
-  assert.strictEqual(parsed.userId, 123);
-  assert.strictEqual(parsed.ip, '127.0.0.1');
-
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    assert.strictEqual(parsed.msg, 'user login');
+    assert.strictEqual(parsed.userId, 123);
+    assert.strictEqual(parsed.ip, '127.0.0.1');
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test Error object serialization
@@ -288,18 +287,18 @@ const path = require('node:path');
   logger.error({ msg: 'operation failed', err });
 
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-  const parsed = JSON.parse(output.trim());
-
-  // Error should be serialized with stack trace
-  assert.strictEqual(parsed.msg, 'operation failed');
-  assert.strictEqual(typeof parsed.err, 'object');
-  assert.strictEqual(parsed.err.message, 'test error');
-  assert.strictEqual(parsed.err.code, 'TEST_ERROR');
-  assert(parsed.err.stack);
-
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    // Error should be serialized with stack trace
+    assert.strictEqual(parsed.msg, 'operation failed');
+    assert.strictEqual(typeof parsed.err, 'object');
+    assert.strictEqual(parsed.err.message, 'test error');
+    assert.strictEqual(parsed.err.code, 'TEST_ERROR');
+    assert(parsed.err.stack);
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test Error as first argument
@@ -316,15 +315,15 @@ const path = require('node:path');
   logger.error(err); // Error as first arg
 
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-  const parsed = JSON.parse(output.trim());
-
-  assert.strictEqual(parsed.msg, 'boom'); // message from error
-  assert.strictEqual(typeof parsed.err, 'object');
-  assert(parsed.err.stack);
-
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    assert.strictEqual(parsed.msg, 'boom'); // message from error
+    assert.strictEqual(typeof parsed.err, 'object');
+    assert(parsed.err.stack);
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test child logger with parent fields merge
@@ -342,17 +341,17 @@ const path = require('node:path');
   childLogger.info('request processed', { duration: 150 }); // log fields
 
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-  const parsed = JSON.parse(output.trim());
-
-  // Merge order: consumer fields < bindings < log fields
-  assert.strictEqual(parsed.service, 'api');
-  assert.strictEqual(parsed.requestId, '123');
-  assert.strictEqual(parsed.duration, 150);
-  assert.strictEqual(parsed.msg, 'request processed');
-
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    // Merge order: consumer fields < bindings < log fields
+    assert.strictEqual(parsed.service, 'api');
+    assert.strictEqual(parsed.requestId, '123');
+    assert.strictEqual(parsed.duration, 150);
+    assert.strictEqual(parsed.msg, 'request processed');
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test field override priority
@@ -370,15 +369,15 @@ const path = require('node:path');
   childLogger.info('test', { env: 'production' });
 
   consumer.flushSync();
-  consumer.end();
-  const output = fs.readFileSync(tmpfile, 'utf8');
-  const parsed = JSON.parse(output.trim());
-
-  // Log fields should override everything
-  assert.strictEqual(parsed.env, 'production');
-  assert.strictEqual(parsed.version, '1.0');
-
-  fs.unlinkSync(tmpfile);
+  consumer.end(() => {
+    const output = fs.readFileSync(tmpfile, 'utf8').trim();
+    assert.notStrictEqual(output, '', 'Log file is empty!');
+    const parsed = JSON.parse(output);
+    // Log fields should override everything
+    assert.strictEqual(parsed.env, 'production');
+    assert.strictEqual(parsed.version, '1.0');
+    fs.unlinkSync(tmpfile);
+  });
 }
 
 // Test multiple consumers (Qard's use case)
@@ -408,24 +407,23 @@ const path = require('node:path');
   logger.error({ msg: 'error message' });
 
   consumer1.flushSync();
-  consumer1.end();
   consumer2.flushSync();
+  consumer1.end();
   consumer2.end();
 
   const output1 = fs.readFileSync(tmpfile1, 'utf8');
   const lines1 = output1.trim().split('\n');
-
   const output2 = fs.readFileSync(tmpfile2, 'utf8');
   const lines2 = output2.trim().split('\n');
 
-  // Consumer1 should have all 4 logs (debug+)
+  // Consumer1 should have 4 lines if the format writes one log per line; otherwise update this check.
   assert.strictEqual(lines1.length, 4);
   assert.strictEqual(JSON.parse(lines1[0]).level, 'debug');
   assert.strictEqual(JSON.parse(lines1[1]).level, 'info');
   assert.strictEqual(JSON.parse(lines1[2]).level, 'warn');
   assert.strictEqual(JSON.parse(lines1[3]).level, 'error');
 
-  // Consumer2 should have only 2 logs (warn+)
+  // Consumer2 should have 2 lines if the format writes one log per line; otherwise update this check.
   assert.strictEqual(lines2.length, 2);
   assert.strictEqual(JSON.parse(lines2[0]).level, 'warn');
   assert.strictEqual(JSON.parse(lines2[1]).level, 'error');

From a9e288f9e98b9098f04f80dc71b8e3427992582e Mon Sep 17 00:00:00 2001
From: Mert Can Altin
Date: Sun, 7 Dec 2025 10:51:44 +0300
Subject: [PATCH 11/13] fix: remove log file empty assertion message for clarity

---
 test/parallel/test-log-basic.js | 71 ++++++++++++++++++---------------
 1 file changed, 39 insertions(+), 32 deletions(-)

diff --git a/test/parallel/test-log-basic.js b/test/parallel/test-log-basic.js
index f1133d7a513929..916fbf142e1e40 100644
--- a/test/parallel/test-log-basic.js
+++ b/test/parallel/test-log-basic.js
@@ -3,6 +3,7 @@
 require('../common');
 
 const assert = require('node:assert');
+const common = require('../common');
 const { createLogger, Logger, LogConsumer, JSONConsumer, LEVELS, channels } = require('node:logger');
 const fs = require('node:fs');
 const os = require('node:os');
@@ -111,7 +112,7 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     assert.strictEqual(parsed.level, 'info');
     assert.strictEqual(parsed.msg, 'test message');
@@ -137,12 +138,11 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     assert.strictEqual(parsed.hostname, 'test-host');
     assert.strictEqual(parsed.pid, 12345);
     assert.strictEqual(parsed.msg, 'with fields');
-    // Cleanup
     fs.unlinkSync(tmpfile);
   });
 }
@@ -163,7 +163,7 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     assert.strictEqual(parsed.requestId, 'xyz-789');
     assert.strictEqual(parsed.action, 'create');
@@ -240,7 +240,7 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     assert.strictEqual(parsed.msg, 'simple message');
     assert.strictEqual(parsed.level, 'info');
@@ -263,7 +263,7 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     assert.strictEqual(parsed.msg, 'user login');
     assert.strictEqual(parsed.userId, 123);
@@ -289,7 +289,7 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     // Error should be serialized with stack trace
     assert.strictEqual(parsed.msg, 'operation failed');
@@ -317,7 +317,7 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     assert.strictEqual(parsed.msg, 'boom'); // message from error
     assert.strictEqual(typeof parsed.err, 'object');
@@ -343,7 +343,7 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     // Merge order: consumer fields < bindings < log fields
     assert.strictEqual(parsed.service, 'api');
@@ -371,7 +371,7 @@ const path = require('node:path');
   consumer.flushSync();
   consumer.end(() => {
     const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '', 'Log file is empty!');
+    assert.notStrictEqual(output, '');
     const parsed = JSON.parse(output);
     // Log fields should override everything
     assert.strictEqual(parsed.env, 'production');
@@ -408,28 +408,35 @@ const path = require('node:path');
 
   consumer1.flushSync();
   consumer2.flushSync();
-  consumer1.end();
-  consumer2.end();
-
-  const output1 = fs.readFileSync(tmpfile1, 'utf8');
-  const lines1 = output1.trim().split('\n');
-  const output2 = fs.readFileSync(tmpfile2, 'utf8');
-  const lines2 = output2.trim().split('\n');
-
-  // Consumer1 should have 4 lines if the format writes one log per line; otherwise update this check.
-  assert.strictEqual(lines1.length, 4);
-  assert.strictEqual(JSON.parse(lines1[0]).level, 'debug');
-  assert.strictEqual(JSON.parse(lines1[1]).level, 'info');
-  assert.strictEqual(JSON.parse(lines1[2]).level, 'warn');
-  assert.strictEqual(JSON.parse(lines1[3]).level, 'error');
-
-  // Consumer2 should have 2 lines if the format writes one log per line; otherwise update this check.
-  assert.strictEqual(lines2.length, 2);
-  assert.strictEqual(JSON.parse(lines2[0]).level, 'warn');
-  assert.strictEqual(JSON.parse(lines2[1]).level, 'error');
-
-  fs.unlinkSync(tmpfile1);
-  fs.unlinkSync(tmpfile2);
+
+  let doneCount = 0;
+  function done() {
+    doneCount++;
+    if (doneCount === 2) {
+      const output1 = fs.readFileSync(tmpfile1, 'utf8');
+      const lines1 = output1.trim().split('\n').filter(Boolean);
+      const output2 = fs.readFileSync(tmpfile2, 'utf8');
+      const lines2 = output2.trim().split('\n').filter(Boolean);
+
+      // Consumer1 should have 4 lines if the format writes one log per line; otherwise update this check.
+      assert.strictEqual(lines1.length, 4);
+      assert.strictEqual(JSON.parse(lines1[0]).level, 'debug');
+      assert.strictEqual(JSON.parse(lines1[1]).level, 'info');
+      assert.strictEqual(JSON.parse(lines1[2]).level, 'warn');
+      assert.strictEqual(JSON.parse(lines1[3]).level, 'error');
+
+      // Consumer2 should have 2 lines if the format writes one log per line; otherwise update this check.
+      assert.strictEqual(lines2.length, 2);
+      assert.strictEqual(JSON.parse(lines2[0]).level, 'warn');
+      assert.strictEqual(JSON.parse(lines2[1]).level, 'error');
+
+      fs.unlinkSync(tmpfile1);
+      fs.unlinkSync(tmpfile2);
+    }
+  }
+
+  consumer1.end(done);
+  consumer2.end(done);
 }
 
 // Test channels export

From 8f88a7118b2335b7251f9057096c7e71e3198012 Mon Sep 17 00:00:00 2001
From: Mert Can Altin
Date: Sun, 7 Dec 2025 10:53:47 +0300
Subject: [PATCH 12/13] fix: refactor test assertions to use common.mustSucceed for consistency

---
 test/parallel/test-log-basic.js | 209 ++++++++++++++++++--------------
 1 file changed, 116 insertions(+), 93 deletions(-)

diff --git a/test/parallel/test-log-basic.js b/test/parallel/test-log-basic.js
index 916fbf142e1e40..7a5bec4d602902 100644
--- a/test/parallel/test-log-basic.js
+++ b/test/parallel/test-log-basic.js
@@ -1,9 +1,8 @@
 'use strict';
 
-require('../common');
+const common = require('../common');
 
 const assert = require('node:assert');
-const common = require('../common');
 const { createLogger, Logger, LogConsumer, JSONConsumer, LEVELS, channels } = require('node:logger');
 const fs = require('node:fs');
 const os = require('node:os');
@@ -111,14 +110,16 @@ const path = require('node:path');
   // Flush synchronously and read the output
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    assert.strictEqual(parsed.level, 'info');
-    assert.strictEqual(parsed.msg, 'test message');
-    assert.strictEqual(parsed.userId, 123);
-    assert.strictEqual(typeof parsed.time, 'number');
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      assert.strictEqual(parsed.level, 'info');
+      assert.strictEqual(parsed.msg, 'test message');
+      assert.strictEqual(parsed.userId, 123);
+      assert.strictEqual(typeof parsed.time, 'number');
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -137,13 +138,15 @@ const path = require('node:path');
 
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    assert.strictEqual(parsed.hostname, 'test-host');
-    assert.strictEqual(parsed.pid, 12345);
-    assert.strictEqual(parsed.msg, 'with fields');
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      assert.strictEqual(parsed.hostname, 'test-host');
+      assert.strictEqual(parsed.pid, 12345);
+      assert.strictEqual(parsed.msg, 'with fields');
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -162,13 +165,15 @@ const path = require('node:path');
 
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    assert.strictEqual(parsed.requestId, 'xyz-789');
-    assert.strictEqual(parsed.action, 'create');
-    assert.strictEqual(parsed.msg, 'child log');
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      assert.strictEqual(parsed.requestId, 'xyz-789');
+      assert.strictEqual(parsed.action, 'create');
+      assert.strictEqual(parsed.msg, 'child log');
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -239,12 +244,14 @@ const path = require('node:path');
 
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    assert.strictEqual(parsed.msg, 'simple message');
-    assert.strictEqual(parsed.level, 'info');
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      assert.strictEqual(parsed.msg, 'simple message');
+      assert.strictEqual(parsed.level, 'info');
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -262,13 +269,15 @@ const path = require('node:path');
 
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    assert.strictEqual(parsed.msg, 'user login');
-    assert.strictEqual(parsed.userId, 123);
-    assert.strictEqual(parsed.ip, '127.0.0.1');
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      assert.strictEqual(parsed.msg, 'user login');
+      assert.strictEqual(parsed.userId, 123);
+      assert.strictEqual(parsed.ip, '127.0.0.1');
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -288,16 +297,18 @@ const path = require('node:path');
 
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    // Error should be serialized with stack trace
-    assert.strictEqual(parsed.msg, 'operation failed');
-    assert.strictEqual(typeof parsed.err, 'object');
-    assert.strictEqual(parsed.err.message, 'test error');
-    assert.strictEqual(parsed.err.code, 'TEST_ERROR');
-    assert(parsed.err.stack);
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      // Error should be serialized with stack trace
+      assert.strictEqual(parsed.msg, 'operation failed');
+      assert.strictEqual(typeof parsed.err, 'object');
+      assert.strictEqual(parsed.err.message, 'test error');
+      assert.strictEqual(parsed.err.code, 'TEST_ERROR');
+      assert(parsed.err.stack);
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -316,13 +327,15 @@ const path = require('node:path');
 
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    assert.strictEqual(parsed.msg, 'boom'); // message from error
-    assert.strictEqual(typeof parsed.err, 'object');
-    assert(parsed.err.stack);
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      assert.strictEqual(parsed.msg, 'boom'); // message from error
+      assert.strictEqual(typeof parsed.err, 'object');
+      assert(parsed.err.stack);
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -342,15 +355,17 @@ const path = require('node:path');
 
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    // Merge order: consumer fields < bindings < log fields
-    assert.strictEqual(parsed.service, 'api');
-    assert.strictEqual(parsed.requestId, '123');
-    assert.strictEqual(parsed.duration, 150);
-    assert.strictEqual(parsed.msg, 'request processed');
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      // Merge order: consumer fields < bindings < log fields
+      assert.strictEqual(parsed.service, 'api');
+      assert.strictEqual(parsed.requestId, '123');
+      assert.strictEqual(parsed.duration, 150);
+      assert.strictEqual(parsed.msg, 'request processed');
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -370,13 +385,15 @@ const path = require('node:path');
 
   consumer.flushSync();
   consumer.end(() => {
-    const output = fs.readFileSync(tmpfile, 'utf8').trim();
-    assert.notStrictEqual(output, '');
-    const parsed = JSON.parse(output);
-    // Log fields should override everything
-    assert.strictEqual(parsed.env, 'production');
-    assert.strictEqual(parsed.version, '1.0');
-    fs.unlinkSync(tmpfile);
+    common.mustSucceed(() => {
+      const output = fs.readFileSync(tmpfile, 'utf8').trim();
+      assert.notStrictEqual(output, '');
+      const parsed = JSON.parse(output);
+      // Log fields should override everything
+      assert.strictEqual(parsed.env, 'production');
+      assert.strictEqual(parsed.version, '1.0');
+      fs.unlinkSync(tmpfile);
+    })();
   });
 }
 
@@ -418,20 +435,22 @@ const path = require('node:path');
       const output2 = fs.readFileSync(tmpfile2, 'utf8');
      const lines2 = output2.trim().split('\n').filter(Boolean);
 
-      // Consumer1 should have 4 lines if the format writes one log per line; otherwise update this check.
-      assert.strictEqual(lines1.length, 4);
-      assert.strictEqual(JSON.parse(lines1[0]).level, 'debug');
-      assert.strictEqual(JSON.parse(lines1[1]).level, 'info');
-      assert.strictEqual(JSON.parse(lines1[2]).level, 'warn');
-      assert.strictEqual(JSON.parse(lines1[3]).level, 'error');
-
-      // Consumer2 should have 2 lines if the format writes one log per line; otherwise update this check.
-      assert.strictEqual(lines2.length, 2);
-      assert.strictEqual(JSON.parse(lines2[0]).level, 'warn');
-      assert.strictEqual(JSON.parse(lines2[1]).level, 'error');
-
-      fs.unlinkSync(tmpfile1);
-      fs.unlinkSync(tmpfile2);
+      // Consumer1 should have 4 log lines if each log is written per line.
+      common.mustSucceed(() => {
+        assert.strictEqual(lines1.length, 4);
+        assert.strictEqual(JSON.parse(lines1[0]).level, 'debug');
+        assert.strictEqual(JSON.parse(lines1[1]).level, 'info');
+        assert.strictEqual(JSON.parse(lines1[2]).level, 'warn');
+        assert.strictEqual(JSON.parse(lines1[3]).level, 'error');
+
+        // Consumer2 should have 2 log lines if each log is written per line.
+        assert.strictEqual(lines2.length, 2);
+        assert.strictEqual(JSON.parse(lines2[0]).level, 'warn');
+        assert.strictEqual(JSON.parse(lines2[1]).level, 'error');
+
+        fs.unlinkSync(tmpfile1);
+        fs.unlinkSync(tmpfile2);
+      })();
     }
   }
 
@@ -471,11 +490,13 @@ const path = require('node:path');
 
   logger.error({ msg: 'Multiple errors', err, error });
 
-  assert.strictEqual(logs.length, 1);
-  assert.strictEqual(logs[0].err.message, 'Error 1');
-  assert.strictEqual(logs[0].error.message, 'Error 2');
-  assert.ok(logs[0].err.stack);
-  assert.ok(logs[0].error.stack);
+  common.mustSucceed(() => {
+    assert.strictEqual(logs.length, 1);
+    assert.strictEqual(logs[0].err.message, 'Error 1');
+    assert.strictEqual(logs[0].error.message, 'Error 2');
+    assert.ok(logs[0].err.stack);
+    assert.ok(logs[0].error.stack);
+  })();
 }
 
 // Test: 'error' field serialization
@@ -498,8 +519,10 @@ const path = require('node:path');
 
   logger.error({ msg: 'Operation failed', error });
 
-  assert.strictEqual(logs.length, 1);
-  assert.strictEqual(logs[0].error.message, 'Test error');
-  assert.strictEqual(logs[0].error.code, 'TEST_CODE');
-  assert.ok(logs[0].error.stack);
+  common.mustSucceed(() => {
+    assert.strictEqual(logs.length, 1);
+    assert.strictEqual(logs[0].error.message, 'Test error');
+    assert.strictEqual(logs[0].error.code, 'TEST_CODE');
+    assert.ok(logs[0].error.stack);
+  })();
 }

From 83a12b70258efd8c58d33e92d135b4d1560e9f2f Mon Sep 17 00:00:00 2001
From: Mert Can Altin
Date: Wed, 10 Dec 2025 23:41:33 +0300
Subject: [PATCH 13/13] docs: add LogConsumer class and enabled method documentation

---
 doc/api/logger.md | 42 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 42 insertions(+)

diff --git a/doc/api/logger.md b/doc/api/logger.md
index b72cbe5159a9a4..426ff141e88e6b 100644
--- a/doc/api/logger.md
+++ b/doc/api/logger.md
@@ -8,3 +8,45 @@
 
 The `node:logger` module provides structured logging capabilities for Node.js
 applications.
+
+## Class: `LogConsumer`
+
+### `consumer.enabled(level)`
+
+<!-- YAML
+added: REPLACEME
+-->
+
+* `level` {string} The log level to check (e.g., `'debug'`, `'info'`, `'warn'`,
+  `'error'`, `'fatal'`).
+* Returns: {boolean} `true` if the level is enabled, `false` otherwise.
+
+Checks if a specific log level is enabled for this consumer.
+
+This method returns `false` for unknown log levels without throwing an error.
+Log levels are case-sensitive and must be one of the predefined levels:
+`'trace'`, `'debug'`, `'info'`, `'warn'`, `'error'`, `'fatal'`.
+
+```js
+const { LogConsumer } = require('node:logger');
+
+const consumer = new LogConsumer({ level: 'info' });
+
+console.log(consumer.enabled('debug')); // false (below threshold)
+console.log(consumer.enabled('info')); // true
+console.log(consumer.enabled('error')); // true
+console.log(consumer.enabled('DEBUG')); // false (unknown level - case sensitive)
+console.log(consumer.enabled('unknown')); // false (unknown level)
+```
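+
+Continuing the example above, `enabled()` can be used to skip work that is
+only needed when a log line would actually be emitted, such as assembling an
+expensive debug payload. The payload below is illustrative only:
+
+```js
+if (consumer.enabled('debug')) {
+  // Never reached with the consumer above: its level is 'info'.
+  const payload = { when: Date.now(), memory: process.memoryUsage() };
+  console.log(payload);
+}
+```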