Compare commits

...

23 Commits

Author SHA1 Message Date
Gareth Jones
fb072dd70d 0.6.20 2014-08-26 09:59:10 +10:00
Gareth Jones
af1ce2933b Merge pull request #236 from jchmura/filter-max-level
Added option for maximum level in logLevelFilter
2014-08-26 09:55:13 +10:00
Jakub Chmura
ade6dd8ea0 Adjusting and adding new tests for logLevelFilter to handle the maxLevel option. 2014-08-25 20:56:37 +02:00
Jakub Chmura
00c62c7fa6 Adding maxLevel to levelFilter.
This optional parameter specifies the maximum level of event accepted by the filter.
2014-08-25 19:48:15 +02:00
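
A minimal usage sketch of the new signature, pieced together from the lib/appenders/logLevelFilter.js diff and the updated tests further down; the 'payments' category and the console shim are illustrative only:

// Sketch only: events below WARN or above ERROR are dropped before they reach
// the wrapped appender. If maxLevel is omitted it defaults to FATAL, so
// existing single-argument configurations keep their old behaviour.
var log4js = require('log4js');
var logLevelFilter = require('log4js/lib/appenders/logLevelFilter');

var filtered = logLevelFilter.appender(
  'WARN',                                              // minimum level
  'ERROR',                                             // new optional maximum level
  function(evt) { console.log(evt.data.join(' ')); }   // any appender function
);
log4js.addAppender(filtered, 'payments');

log4js.getLogger('payments').error('kept - within [WARN, ERROR]');
log4js.getLogger('payments').fatal('dropped - above maxLevel');
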
Gareth Jones
ae04cc9a4a 0.6.19 2014-08-21 21:21:16 +10:00
Gareth Jones
70a9444f4d Merge pull request #235 from j2ro/master
Pull request for #233
2014-08-21 21:20:27 +10:00
j2ro
3e78fcb630 #233: Adding ability to put pid in log layout pattern 2014-08-20 10:43:48 +01:00
j2ro
44687e1bd1 #233: Adding ability to put pid in log layout pattern 2014-08-20 10:23:31 +01:00
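
A short sketch of the new %z token in a pattern layout, based on the layouts diff and the "%z should output pid" test below; the pattern string and category are illustrative only:

// Sketch only: '%z' expands to process.pid in pattern layouts.
var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: 'console',
      layout: { type: 'pattern', pattern: '%d %p %z %c - %m' }
    }
  ]
});

log4js.getLogger('worker').info('started');
// prints something like: 2014-08-20 10:43:48.123 INFO 12345 worker - started
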
Gareth Jones
8e5754371a 0.6.18 2014-08-20 10:20:57 +01:00
Grégoire Charvet 黑瓜
feef9975c7 Remove test logging 2014-08-20 10:20:57 +01:00
Grégoire Charvet 黑瓜
93695fbfc4 Replace spaces with tabs
Keeping consistency with the rest of the code.
2014-08-20 10:20:57 +01:00
Grégoire Charvet 黑瓜
0571089a8b Correctly send message, level and hostname
Fix issue #230 where the level would hold the message and the log level
information would be lost.
2014-08-20 10:20:57 +01:00
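
The record shape the loggly appender sends after this fix, rebuilt from the lib/appenders/loggly.js diff further down; buildRecord is a hypothetical helper used only to illustrate the fields, and the values are made up:

// Sketch only: the formatted message now goes into 'msg', and 'level' carries
// the actual log level instead of the message (issue #230).
var os = require('os');

function buildRecord(loggingEvent, layout) {  // hypothetical helper, not part of log4js
  return {
    msg: layout(loggingEvent),              // formatted message text
    level: loggingEvent.level.levelStr,     // e.g. 'INFO'
    category: loggingEvent.categoryName,
    hostname: os.hostname().toString()
  };
}
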
Gareth Jones
ab77895555 0.6.18 2014-08-20 09:12:13 +10:00
Gareth Jones
9637be8a41 Merge pull request #232 from GregoireDigbil/fix_loggly_formatting
Fix loggly formatting
2014-08-20 09:11:12 +10:00
Grégoire Charvet 黑瓜
0ecd729f49 Remove test logging 2014-08-18 07:32:23 +08:00
Grégoire Charvet 黑瓜
f9c2e78055 Replace spaces with tabs
Keeping consistency with the rest of the code.
2014-08-18 07:31:33 +08:00
Grégoire Charvet 黑瓜
e7267ecf46 Correctly send message, level and hostname
Fix issue #230 where the level would hold the message and the log level
information would be lost.
2014-08-18 07:25:55 +08:00
Gareth Jones
c03185b563 Merge pull request #231 from eurekaa/buffered-logger
added getBufferedLogger function.
2014-08-17 11:00:10 +10:00
Stefano Graziato
c0aa8c5c86 gitignore 2014-08-16 14:52:00 +02:00
Stefano Graziato
59a6703549 getBufferedLogger tested. 2014-08-16 14:22:58 +02:00
Gareth Jones
ceffdf92e4 Removed hook.io appender because hook.io does not build on systems without Python 2014-08-16 12:23:50 +10:00
Stefano Graziato
c9e72d0f00 added getBufferedLogger function.
This function should be useful when you need to log during parallel async
operations without making a mess of the logs. For example, when you walk a
directory asynchronously and want the logs to be grouped by file.

It returns the same kind of object as getLogger(), but messages are stored
internally and sent to the appenders only when you call its flush() method.
2014-08-15 13:42:08 +02:00
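
A usage sketch based on this description and the lib/log4js.js diff further down; the directory walk, the 'walker' category, and the messages are illustrative only:

// Sketch only: one buffered logger per file keeps that file's messages grouped
// together even though the stat calls complete out of order.
var log4js = require('log4js');
var fs = require('fs');

fs.readdir('.', function (err, files) {
  if (err) throw err;
  files.forEach(function (file) {
    var logger = log4js.getBufferedLogger('walker');
    logger.info('processing ' + file);
    fs.stat(file, function (statErr, stats) {
      if (statErr) {
        logger.error('stat failed for ' + file + ': ' + statErr.message);
      } else {
        logger.debug(file + ' is ' + stats.size + ' bytes');
      }
      // Nothing reaches the appenders until flush() is called.
      logger.flush();
    });
  });
});
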
Gareth Jones
a27345461b altering the timings on some tests to make them slightly less flaky (maybe) 2014-08-15 20:06:35 +10:00
14 changed files with 175 additions and 309 deletions

.gitignore
View File

@@ -5,3 +5,4 @@ node_modules
 .bob/
 test/streams/test-rolling-file-stream*
 test/streams/test-rolling-stream-with-existing-files*
+.idea

View File

@@ -1,76 +0,0 @@
"use strict";
var log4js = require('../log4js')
, layouts = require('../layouts')
, Hook = require('hook.io').Hook
, util = require('util');
var Logger = function createLogger(options) {
var self = this;
var actualAppender = options.actualAppender;
Hook.call(self, options);
self.on('hook::ready', function hookReady() {
self.on('*::' + options.name + '::log', function log(loggingEvent) {
deserializeLoggingEvent(loggingEvent);
actualAppender(loggingEvent);
});
});
};
util.inherits(Logger, Hook);
function deserializeLoggingEvent(loggingEvent) {
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level.toString = function levelToString() {
return loggingEvent.level.levelStr;
};
}
function initHook(hookioOptions) {
var loggerHook;
if (hookioOptions.mode === 'master') {
// Start the master hook, handling the actual logging
loggerHook = new Logger(hookioOptions);
} else {
// Start a worker, just emitting events for a master
loggerHook = new Hook(hookioOptions);
}
loggerHook.start();
return loggerHook;
}
function getBufferedHook(hook, eventName) {
var hookBuffer = [];
var hookReady = false;
hook.on('hook::ready', function emptyBuffer() {
hookBuffer.forEach(function logBufferItem(loggingEvent) {
hook.emit(eventName, loggingEvent);
});
hookReady = true;
});
return function log(loggingEvent) {
if (hookReady) {
hook.emit(eventName, loggingEvent);
} else {
hookBuffer.push(loggingEvent);
}
};
}
function createAppender(hookioOptions) {
var loggerHook = initHook(hookioOptions);
var loggerEvent = hookioOptions.name + '::log';
return getBufferedHook(loggerHook, loggerEvent);
}
function configure(config) {
var actualAppender;
if (config.appender && config.mode === 'master') {
log4js.loadAppender(config.appender.type);
actualAppender = log4js.appenderMakers[config.appender.type](config.appender);
config.actualAppender = actualAppender;
}
return createAppender(config);
}
exports.appender = createAppender;
exports.configure = configure;

View File

@@ -2,10 +2,12 @@
 var levels = require('../levels')
 , log4js = require('../log4js');
-function logLevelFilter (levelString, appender) {
-  var level = levels.toLevel(levelString);
+function logLevelFilter (minLevelString, maxLevelString, appender) {
+  var minLevel = levels.toLevel(minLevelString);
+  var maxLevel = levels.toLevel(maxLevelString, levels.FATAL);
   return function(logEvent) {
-    if (logEvent.level.isGreaterThanOrEqualTo(level)) {
+    var eventLevel = logEvent.level;
+    if (eventLevel.isGreaterThanOrEqualTo(minLevel) && eventLevel.isLessThanOrEqualTo(maxLevel)) {
       appender(logEvent);
     }
   };
@@ -14,7 +16,7 @@ function logLevelFilter (levelString, appender) {
 function configure(config) {
   log4js.loadAppender(config.appender.type);
   var appender = log4js.appenderMakers[config.appender.type](config.appender);
-  return logLevelFilter(config.level, appender);
+  return logLevelFilter(config.level, config.maxLevel, appender);
 }
 exports.appender = logLevelFilter;

View File

@@ -18,24 +18,17 @@ var layouts = require('../layouts')
  */
 function logglyAppender(config, layout) {
   var client = loggly.createClient(config);
+  if(!layout) layout = passThrough;
-  function packageMessage(loggingEvent) {
-    function BaseItem(level, msg) {
-      this.level = level || loggingEvent.level.toString();
-      this.category = loggingEvent.categoryName;
-      this.hostname = os.hostname().toString();
-      if (typeof msg !== 'undefined')
-        this.msg = msg;
-    };
-    var formattedMsg = passThrough(loggingEvent);
-    return new BaseItem(formattedMsg);
-  };
-  return function(loggingEvent) {
-    var a = layout ? layout(loggingEvent) : packageMessage(loggingEvent);
-    client.log(a, config.tags);
-  };
+  return function(loggingEvent) {
+    var msg = layout(loggingEvent);
+    client.log({
+      msg: msg,
+      level: loggingEvent.level.levelStr,
+      category: loggingEvent.categoryName,
+      hostname: os.hostname().toString(),
+    });
+  }
 }
 function configure(config) {

View File

@@ -125,6 +125,7 @@ function messagePassThroughLayout (loggingEvent) {
 * - %d date in various formats
 * - %% %
 * - %n newline
+* - %z pid
 * - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
 * You can use %[ and %] to define a colored block.
 *
@@ -144,7 +145,7 @@ function messagePassThroughLayout (loggingEvent) {
  */
 function patternLayout (pattern, tokens) {
   var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
-  var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprx%])(\{([^\}]+)\})?|([^%]+)/;
+  var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprzx%])(\{([^\}]+)\})?|([^%]+)/;
   pattern = pattern || TTCC_CONVERSION_PATTERN;
@@ -211,6 +212,10 @@ function patternLayout (pattern, tokens) {
     return '%';
   }
+  function pid() {
+    return process.pid;
+  }
   function userDefined(loggingEvent, specifier) {
     if (typeof(tokens[specifier]) !== 'undefined') {
       if (typeof(tokens[specifier]) === 'function') {
@@ -232,6 +237,7 @@ function patternLayout (pattern, tokens) {
     'r': startTime,
     '[': startColour,
     ']': endColour,
+    'z': pid,
     '%': percent,
     'x': userDefined
   };

View File

@@ -70,6 +70,29 @@ function hasLogger(logger) {
 }
+function getBufferedLogger(categoryName) {
+  var base_logger = getLogger(categoryName);
+  var logger = {};
+  logger.temp = [];
+  logger.target = base_logger;
+  logger.flush = function () {
+    for (var i = 0; i < logger.temp.length; i++) {
+      var log = logger.temp[i];
+      logger.target[log.level](log.message);
+      delete logger.temp[i];
+    }
+  };
+  logger.trace = function (message) { logger.temp.push({level: 'trace', message: message}); };
+  logger.debug = function (message) { logger.temp.push({level: 'debug', message: message}); };
+  logger.info = function (message) { logger.temp.push({level: 'info', message: message}); };
+  logger.warn = function (message) { logger.temp.push({level: 'warn', message: message}); };
+  logger.error = function (message) { logger.temp.push({level: 'error', message: message}); };
+  logger.fatal = function (message) { logger.temp.push({level: 'fatal', message: message}); };
+  return logger;
+}
 /**
  * Get a logger instance. Instance is cached on categoryName level.
  * @param {String} categoryName name of category to log to.
@@ -375,6 +398,7 @@ function shutdown(cb) {
 }
 module.exports = {
+  getBufferedLogger: getBufferedLogger,
   getLogger: getLogger,
   getDefaultLogger: getDefaultLogger,
   hasLogger: hasLogger,

View File

@@ -1,6 +1,6 @@
 {
   "name": "log4js",
-  "version": "0.6.17",
+  "version": "0.6.20",
   "description": "Port of Log4js to work with node.",
   "keywords": [
     "logging",
@@ -35,7 +35,6 @@
   "devDependencies": {
     "vows": "0.7.0",
     "sandboxed-module": "0.1.3",
-    "hook.io": "0.8.10",
     "underscore": "1.2.1"
   },
   "browser": {

View File

@@ -16,24 +16,24 @@ function remove(filename) {
vows.describe('log4js categoryFilter').addBatch({
'appender': {
topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger;
log4js.clearAppenders();
var appender = require('../lib/appenders/categoryFilter')
.appender(
['app'],
['app'],
function(evt) { logEvents.push(evt); }
);
log4js.addAppender(appender, ["app","web"]);
webLogger = log4js.getLogger("web");
appLogger = log4js.getLogger("app");
webLogger.debug('This should get logged');
appLogger.debug('This should not');
webLogger.debug('Hello again');
log4js.getLogger('db').debug('This shouldn\'t be included by the appender anyway');
return logEvents;
},
'should only pass matching category' : function(logEvents) {
@@ -42,25 +42,25 @@ vows.describe('log4js categoryFilter').addBatch({
assert.equal(logEvents[1].data[0], 'Hello again');
}
},
'configure': {
topic: function() {
var log4js = require('../lib/log4js')
, logger, weblogger;
remove(__dirname + '/categoryFilter-web.log');
remove(__dirname + '/categoryFilter-noweb.log');
log4js.configure('test/with-categoryFilter.json');
logger = log4js.getLogger("app");
weblogger = log4js.getLogger("web");
logger.info('Loading app');
logger.debug('Initialising indexes');
weblogger.info('00:00:00 GET / 200');
weblogger.warn('00:00:00 GET / 500');
//wait for the file system to catch up
setTimeout(this.callback, 100);
setTimeout(this.callback, 500);
},
'tmp-tests.log': {
topic: function() {

View File

@@ -1,176 +0,0 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, sandbox = require('sandboxed-module');
function fancyResultingHookioAppender(hookNotReady) {
var emitHook = !hookNotReady
, result = { ons: {}, emissions: {}, logged: [], configs: [] };
var fakeLog4Js = {
appenderMakers: {}
};
fakeLog4Js.loadAppender = function (appender) {
fakeLog4Js.appenderMakers[appender] = function (config) {
result.actualLoggerConfig = config;
return function log(logEvent) {
result.logged.push(logEvent);
};
};
};
var fakeHookIo = { Hook: function(config) { result.configs.push(config); } };
fakeHookIo.Hook.prototype.start = function () {
result.startCalled = true;
};
fakeHookIo.Hook.prototype.on = function (eventName, functionToExec) {
result.ons[eventName] = { functionToExec: functionToExec };
if (emitHook && eventName === 'hook::ready') {
functionToExec();
}
};
fakeHookIo.Hook.prototype.emit = function (eventName, data) {
result.emissions[eventName] = result.emissions[eventName] || [];
result.emissions[eventName].push({data: data});
var on = '*::' + eventName;
if (eventName !== 'hook::ready' && result.ons[on]) {
result.ons[on].callingCount =
result.ons[on].callingCount ? result.ons[on].callingCount += 1 : 1;
result.ons[on].functionToExec(data);
}
};
return { theResult: result,
theModule: sandbox.require('../lib/appenders/hookio', {
requires: {
'../log4js': fakeLog4Js,
'hook.io': fakeHookIo
}
})
};
}
vows.describe('log4js hookioAppender').addBatch({
'master': {
topic: function() {
var fancy = fancyResultingHookioAppender();
var logger = fancy.theModule.configure(
{
name: 'ohno',
mode: 'master',
'hook-port': 5001,
appender: { type: 'file' }
}
);
logger(
{
level: { levelStr: 'INFO' },
data: "ALRIGHTY THEN",
startTime: '2011-10-27T03:53:16.031Z'
}
);
logger(
{
level: { levelStr: 'DEBUG' },
data: "OH WOW",
startTime: '2011-10-27T04:53:16.031Z'
}
);
return fancy.theResult;
},
'should write to the actual appender': function (result) {
assert.isTrue(result.startCalled);
assert.equal(result.configs.length, 1);
assert.equal(result.configs[0]['hook-port'], 5001);
assert.equal(result.logged.length, 2);
assert.equal(result.emissions['ohno::log'].length, 2);
assert.equal(result.ons['*::ohno::log'].callingCount, 2);
},
'data written should be formatted correctly': function (result) {
assert.equal(result.logged[0].level.toString(), 'INFO');
assert.equal(result.logged[0].data, 'ALRIGHTY THEN');
assert.isTrue(typeof(result.logged[0].startTime) === 'object');
assert.equal(result.logged[1].level.toString(), 'DEBUG');
assert.equal(result.logged[1].data, 'OH WOW');
assert.isTrue(typeof(result.logged[1].startTime) === 'object');
},
'the actual logger should get the right config': function (result) {
assert.equal(result.actualLoggerConfig.type, 'file');
}
},
'worker': {
'should emit logging events to the master': {
topic: function() {
var fancy = fancyResultingHookioAppender();
var logger = fancy.theModule.configure({
name: 'ohno',
mode: 'worker',
appender: { type: 'file' }
});
logger({
level: { levelStr: 'INFO' },
data: "ALRIGHTY THEN",
startTime: '2011-10-27T03:53:16.031Z'
});
logger({
level: { levelStr: 'DEBUG' },
data: "OH WOW",
startTime: '2011-10-27T04:53:16.031Z'
});
return fancy.theResult;
},
'should not write to the actual appender': function (result) {
assert.isTrue(result.startCalled);
assert.equal(result.logged.length, 0);
assert.equal(result.emissions['ohno::log'].length, 2);
assert.isUndefined(result.ons['*::ohno::log']);
}
}
},
'when hook not ready': {
topic: function() {
var fancy = fancyResultingHookioAppender(true)
, logger = fancy.theModule.configure({
name: 'ohno',
mode: 'worker'
});
logger({
level: { levelStr: 'INFO' },
data: "something",
startTime: '2011-10-27T03:45:12.031Z'
});
return fancy;
},
'should buffer the log events': function(fancy) {
assert.isUndefined(fancy.theResult.emissions['ohno::log']);
},
},
'when hook ready': {
topic: function() {
var fancy = fancyResultingHookioAppender(true)
, logger = fancy.theModule.configure({
name: 'ohno',
mode: 'worker'
});
logger({
level: { levelStr: 'INFO' },
data: "something",
startTime: '2011-10-27T03:45:12.031Z'
});
fancy.theResult.ons['hook::ready'].functionToExec();
return fancy;
},
'should emit the buffered events': function(fancy) {
assert.equal(fancy.theResult.emissions['ohno::log'].length, 1);
}
}
}).exportTo(module);

View File

@@ -217,6 +217,9 @@ vows.describe('log4js layouts').addBatch({
   '%h should output hostname' : function(args) {
     test(args, '%h', os.hostname().toString());
   },
+  '%z should output pid' : function(args) {
+    test(args, '%z', process.pid);
+  },
   '%c should handle category names like java-style package names': function(args) {
     test(args, '%c{1}', 'tests');
     test(args, '%c{2}', 'of.tests');

View File

@@ -21,12 +21,13 @@ vows.describe('log4js logLevelFilter').addBatch({
       log4js.addAppender(
         require('../lib/appenders/logLevelFilter')
           .appender(
-            'ERROR',
+            'ERROR',
+            undefined,
             function(evt) { logEvents.push(evt); }
-          ),
+          ),
         "logLevelTest"
       );
       logger = log4js.getLogger("logLevelTest");
       logger.debug('this should not trigger an event');
       logger.warn('neither should this');
@@ -45,18 +46,21 @@ vows.describe('log4js logLevelFilter').addBatch({
     topic: function() {
       var log4js = require('../lib/log4js')
       , logger;
       remove(__dirname + '/logLevelFilter.log');
       remove(__dirname + '/logLevelFilter-warnings.log');
+      remove(__dirname + '/logLevelFilter-debugs.log');
       log4js.configure('test/with-logLevelFilter.json');
       logger = log4js.getLogger("tests");
-      logger.info('main');
-      logger.error('both');
-      logger.warn('both');
-      logger.debug('main');
+      logger.debug('debug');
+      logger.info('info');
+      logger.error('error');
+      logger.warn('warn');
+      logger.debug('debug');
+      logger.trace('trace');
       //wait for the file system to catch up
-      setTimeout(this.callback, 100);
+      setTimeout(this.callback, 500);
     },
     'tmp-tests.log': {
       topic: function() {
@@ -64,7 +68,7 @@ vows.describe('log4js logLevelFilter').addBatch({
       },
       'should contain all log messages': function (contents) {
         var messages = contents.trim().split(EOL);
-        assert.deepEqual(messages, ['main','both','both','main']);
+        assert.deepEqual(messages, ['debug','info','error','warn','debug','trace']);
       }
     },
     'tmp-tests-warnings.log': {
@@ -73,7 +77,16 @@ vows.describe('log4js logLevelFilter').addBatch({
      },
      'should contain only error and warning log messages': function(contents) {
        var messages = contents.trim().split(EOL);
-        assert.deepEqual(messages, ['both','both']);
+        assert.deepEqual(messages, ['error','warn']);
      }
    },
+    'tmp-tests-debugs.log': {
+      topic: function() {
+        fs.readFile(__dirname + '/logLevelFilter-debugs.log','utf8',this.callback);
+      },
+      'should contain only trace and debug log messages': function(contents) {
+        var messages = contents.trim().split(EOL);
+        assert.deepEqual(messages, ['debug','debug','trace']);
+      }
+    }
   }

View File

@@ -32,6 +32,72 @@ function setupConsoleTest() {
 }
 vows.describe('log4js').addBatch({
+  'getBufferedLogger': {
+    topic: function () {
+      var log4js = require('../lib/log4js');
+      log4js.clearAppenders();
+      var logger = log4js.getBufferedLogger('tests');
+      return logger;
+    },
+    'should take a category and return a logger': function (logger) {
+      assert.equal(logger.target.category, 'tests');
+      assert.isFunction(logger.flush);
+      assert.isFunction(logger.trace);
+      assert.isFunction(logger.debug);
+      assert.isFunction(logger.info);
+      assert.isFunction(logger.warn);
+      assert.isFunction(logger.error);
+      assert.isFunction(logger.fatal);
+    },
+    'cache events': {
+      topic: function () {
+        var log4js = require('../lib/log4js');
+        log4js.clearAppenders();
+        var logger = log4js.getBufferedLogger('tests1');
+        var events = [];
+        logger.target.addListener("log", function (logEvent) { events.push(logEvent); });
+        logger.debug("Debug event");
+        logger.trace("Trace event 1");
+        logger.trace("Trace event 2");
+        logger.warn("Warning event");
+        logger.error("Aargh!", new Error("Pants are on fire!"));
+        logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
+        return events;
+      },
+      'should not emit log events if .flush() is not called.': function (events) {
+        assert.equal(events.length, 0);
+      }
+    },
+    'log events after flush() is called': {
+      topic: function () {
+        var log4js = require('../lib/log4js');
+        log4js.clearAppenders();
+        var logger = log4js.getBufferedLogger('tests2');
+        logger.target.setLevel("TRACE");
+        var events = [];
+        logger.target.addListener("log", function (logEvent) { events.push(logEvent); });
+        logger.debug("Debug event");
+        logger.trace("Trace event 1");
+        logger.trace("Trace event 2");
+        logger.warn("Warning event");
+        logger.error("Aargh!", new Error("Pants are on fire!"));
+        logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
+        logger.flush();
+        return events;
+      },
+      'should emit log events when .flush() is called.': function (events) {
+        assert.equal(events.length, 6);
+      }
+    }
+  },
   'getLogger': {
     topic: function() {
       var log4js = require('../lib/log4js');

View File

@@ -7,7 +7,7 @@ var vows = require('vows')
function setupLogging(category, options) {
var msgs = [];
var fakeMailer = {
createTransport: function (name, options) {
return {
@@ -49,7 +49,7 @@ function setupLogging(category, options) {
});
log4js.addAppender(smtpModule.configure(options), category);
return {
logger: log4js.getLogger(category),
mailer: fakeMailer,
@@ -150,10 +150,10 @@ vows.describe('log4js smtpAppender').addBatch({
       }, 500);
       setTimeout(function () {
         setup.logger.info('Log event #3');
-      }, 1050);
+      }, 1100);
       setTimeout(function () {
         self.callback(null, setup);
-      }, 2100);
+      }, 3000);
     },
     'there should be three messages': function (result) {
       assert.equal(result.results.length, 3);
@@ -181,13 +181,13 @@ vows.describe('log4js smtpAppender').addBatch({
       }, 0);
       setTimeout(function () {
         setup.logger.info('Log event #2');
-      }, 500);
+      }, 100);
       setTimeout(function () {
         setup.logger.info('Log event #3');
-      }, 1050);
+      }, 1500);
       setTimeout(function () {
         self.callback(null, setup);
-      }, 2100);
+      }, 3000);
     },
     'there should be two messages': function (result) {
       assert.equal(result.results.length, 2);
@@ -218,7 +218,7 @@ vows.describe('log4js smtpAppender').addBatch({
close: function() { }
};
};
setup.logger.info("This will break");
return setup.console;
},
@@ -228,6 +228,4 @@ vows.describe('log4js smtpAppender').addBatch({
assert.equal(cons.errors[0].value.message, 'oh noes');
}
}
}).export(module);

View File

@@ -12,6 +12,19 @@
       }
     }
   },
+  {
+    "category": "tests",
+    "type": "logLevelFilter",
+    "level": "TRACE",
+    "maxLevel": "DEBUG",
+    "appender": {
+      "type": "file",
+      "filename": "test/logLevelFilter-debugs.log",
+      "layout": {
+        "type": "messagePassThrough"
+      }
+    }
+  },
   {
     "category": "tests",
     "type": "file",
@@ -23,6 +36,6 @@
   ],
   "levels": {
-    "tests": "DEBUG"
+    "tests": "TRACE"
   }
 }