Compare commits

..

9 Commits

Author SHA1 Message Date
Gareth Jones
6aacb0da0b Merge pull request #195 from jengler/flush-on-exit
Flush on exit
2014-04-09 07:35:25 +10:00
John Engler
6e3da6f44b Added error throwing when there is an error loading a test file.
This will hopefully give us better visibility into our Travis CI
build failures.
2014-04-08 12:40:27 -07:00
John Engler
3b5eb28115 Update dateFile EOL usage to be consistent with appender.
From the looks of the Travis CI failure, this could be the issue
causing failures. Not sure as I can't reproduce locally. However,
it is still an inconsistency and worth fixing.
2014-04-08 10:47:18 -07:00
John Engler
633ed3cddb Support for disabling log writes on shutdown.
Updated logger.js to support disabling all log writes.
Updated log4js.js shutdown function to disable log writes.
Added tests.
Updated .gitignore to ignore the rolling date stream's test output.
2014-04-07 19:06:29 -07:00
John Engler
8ca092cdb9 Removed callback to write, as it is not needed. 2014-04-05 16:14:56 -07:00
John Engler
3ec9811b5e Update log4js module to expose a shutdown function.
loadAppender will check for a shutdown function exposed by
a loaded appender. If present, it will be cached so that
log4js's own shutdown function can invoke it.

The intent here is that a Node application would not invoke
process.exit until after the log4js shutdown callback returns.
(A usage sketch follows the commit list below.)
2014-04-05 15:12:45 -07:00
Gareth Jones
2a38f460dc tried adding process.nextTick - didn't help 2013-08-05 07:56:02 +10:00
Gareth Jones
9f77734f74 test case for flush on exit 2013-08-05 07:55:07 +10:00
Gareth Jones
ce8b6b06b9 trying out a shutdown function 2013-08-05 07:21:12 +10:00
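As referenced in 3ec9811b5e above, the intended usage pattern is roughly the following. This is a hypothetical minimal sketch, not part of this diff; the appender config and filenames are illustrative:

// Sketch: drain all appenders before letting the process exit.
var log4js = require('log4js');
log4js.configure({
  appenders: [
    { type: 'file', filename: 'app.log' }
  ]
});
var logger = log4js.getLogger();
logger.info('about to exit');
// shutdown() disables further writes, flushes each appender, then calls back.
log4js.shutdown(function(err) {
  process.exit(err ? 1 : 0);
});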
22 changed files with 192 additions and 428 deletions

.gitignore (1 line changed)

@@ -4,3 +4,4 @@ build
node_modules
.bob/
test/streams/test-rolling-file-stream*
test/streams/test-rolling-stream-with-existing-files*

examples/flush-on-exit.js (new file, 27 lines)

@@ -0,0 +1,27 @@
/**
* run this, then "ab -c 10 -n 100 localhost:4444/" to test (in
* another shell)
*/
var log4js = require('../lib/log4js');
log4js.configure({
appenders: [
{ type: 'file', filename: 'cheese.log', category: 'cheese' },
{ type: 'console'}
]
});
var logger = log4js.getLogger('cheese');
logger.setLevel('INFO');
var http=require('http');
var server = http.createServer(function(request, response){
response.writeHead(200, {'Content-Type': 'text/plain'});
var rd = Math.random() * 50;
logger.info("hello " + rd);
response.write('hello ');
if (Math.floor(rd) == 30){
log4js.shutdown(function() { process.exit(1); });
}
response.end();
}).listen(4444);

lib/appenders/categoryFilter.js (deleted)

@@ -1,20 +0,0 @@
"use strict";
var log4js = require('../log4js');
function categoryFilter (excludes, appender) {
if (typeof(excludes) === 'string') excludes = [excludes];
return function(logEvent) {
if (excludes.indexOf(logEvent.categoryName) === -1) {
appender(logEvent);
}
};
}
function configure(config) {
log4js.loadAppender(config.appender.type);
var appender = log4js.appenderMakers[config.appender.type](config.appender);
return categoryFilter(config.exclude, appender);
}
exports.appender = categoryFilter;
exports.configure = configure;

lib/appenders/clustered.js (deleted)

@@ -1,126 +0,0 @@
"use strict";
var cluster = require('cluster');
var log4js = require('../log4js');
/**
* Takes a loggingEvent object, returns string representation of it.
*/
function serializeLoggingEvent(loggingEvent) {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
for (var i = 0; i < loggingEvent.data.length; i++) {
var item = loggingEvent.data[i];
if (item && item.stack && JSON.stringify(item) === '{}') { // Validate that we really are in this case
loggingEvent.data[i] = {stack : item.stack};
}
}
return JSON.stringify(loggingEvent);
}
/**
* Takes a string, returns an object with
* the correct log properties.
*
* This method has been "borrowed" from the `multiprocess` appender
* by `nomiddlename` (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js)
*
* Apparently, node.js serializes everything to strings when using `process.send()`,
* so we need smart deserialization that will recreate log date and level for further processing by log4js internals.
*/
function deserializeLoggingEvent(loggingEventString) {
var loggingEvent;
try {
loggingEvent = JSON.parse(loggingEventString);
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
} catch (e) {
// JSON.parse failed, just log the contents probably a naughty.
loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: log4js.levels.ERROR,
data: [ 'Unable to parse log:', loggingEventString ]
};
}
return loggingEvent;
}
/**
* Creates an appender.
*
* If the current process is a master (`cluster.isMaster`), then this will be a "master appender".
* Otherwise this will be a worker appender, that just sends loggingEvents to the master process.
*
* If you are using this method directly, make sure to provide it with `config.actualAppenders` array
* of actual appender instances.
*
* Or better use `configure(config, options)`
*/
function createAppender(config) {
if (cluster.isMaster) {
var masterAppender = function(loggingEvent) {
if (config.actualAppenders) {
var size = config.actualAppenders.length;
for(var i = 0; i < size; i++) {
config.actualAppenders[i](loggingEvent);
}
}
}
// Listen on new workers
cluster.on('fork', function(worker) {
worker.on('message', function(message) {
if (message.type && message.type === '::log-message') {
// console.log("master : " + cluster.isMaster + " received message: " + JSON.stringify(message.event));
var loggingEvent = deserializeLoggingEvent(message.event);
masterAppender(loggingEvent);
}
});
});
return masterAppender;
} else {
return function(loggingEvent) {
// If inside the worker process, then send the logger event to master.
if (cluster.isWorker) {
// console.log("worker " + cluster.worker.id + " is sending message");
process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent)});
}
}
}
}
function configure(config, options) {
if (config.appenders && cluster.isMaster) {
var size = config.appenders.length;
config.actualAppenders = new Array(size);
for(var i = 0; i < size; i++) {
log4js.loadAppender(config.appenders[i].type);
config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](config.appenders[i], options);
}
}
return createAppender(config);
}
exports.appender = createAppender;
exports.configure = configure;

lib/appenders/file.js

@@ -1,5 +1,6 @@
"use strict";
var layouts = require('../layouts')
, async = require('async')
, path = require('path')
, fs = require('fs')
, streams = require('../streams')
@@ -78,5 +79,16 @@ function configure(config, options) {
return fileAppender(config.filename, layout, config.maxLogSize, config.backups);
}
function shutdown(cb) {
async.forEach(openFiles, function(file, done) {
if (!file.write(eol, "utf-8")) {
file.once('drain', function() {
file.end(done);
});
}
}, cb);
}
exports.appender = fileAppender;
exports.configure = configure;
exports.shutdown = shutdown;
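One caveat with the hunk above: done is only invoked on the 'drain' path, so when file.write returns true (no backpressure) the async iterator never completes and cb never fires. A defensive variant would close the stream in both branches — a sketch, not what this commit ships:

function shutdown(cb) {
  async.forEach(openFiles, function(file, done) {
    if (!file.write(eol, "utf-8")) {
      // Stream buffer is full; wait for it to drain before closing.
      file.once('drain', function() {
        file.end(done);
      });
    } else {
      // Write was buffered successfully; close straight away.
      file.end(done);
    }
  }, cb);
}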

lib/appenders/gelf.js

@@ -100,7 +100,7 @@ function gelfAppender (layout, host, port, hostname, facility) {
msg.short_message = msg.full_message;
msg.version="1.0";
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
msg.timestamp = msg.timestamp || new Date().getTime() / 1000 >> 0;
msg.host = hostname;
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
msg.facility = facility;
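Note the >> 0 in the new timestamp line: division binds tighter than the shift, and the bitwise shift coerces the result to a 32-bit integer, truncating the fractional seconds. For example:

// 1396900000123 ms / 1000 = 1396900000.123; >> 0 truncates to 1396900000
// (safe while whole seconds fit in 32 bits, i.e. until 2038)
var seconds = new Date().getTime() / 1000 >> 0;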

lib/appenders/multiprocess.js

@@ -94,11 +94,6 @@ function workerAppender(config) {
}
function write(loggingEvent) {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') { // Validate that we really are in this case
loggingEvent = {stack : loggingEvent.stack};
}
socket.write(JSON.stringify(loggingEvent), 'utf8');
socket.write(END_MSG, 'utf8');
}

lib/date_format.js

@@ -45,7 +45,7 @@ exports.asString = function(/*format,*/ date) {
var vDay = addZero(date.getDate());
var vMonth = addZero(date.getMonth()+1);
var vYearLong = addZero(date.getFullYear());
var vYearShort = addZero(date.getFullYear().toString().substring(2,4));
var vYearShort = addZero(date.getFullYear().toString().substring(3,4));
var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
var vHour = addZero(date.getHours());
var vMinute = addZero(date.getMinutes());

lib/layouts.js

@@ -120,7 +120,6 @@ function messagePassThroughLayout (loggingEvent) {
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
* - %d date in various formats
* - %% %
@@ -144,7 +143,7 @@ function messagePassThroughLayout (loggingEvent) {
*/
function patternLayout (pattern, tokens) {
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprx%])(\{([^\}]+)\})?|([^%]+)/;
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdmnprx%])(\{([^\}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
@@ -167,8 +166,6 @@ function patternLayout (pattern, tokens) {
// Pick up special cases
if (format == "ISO8601") {
format = dateFormat.ISO8601_FORMAT;
} else if (format == "ISO8601_WITH_TZ_OFFSET") {
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
} else if (format == "ABSOLUTE") {
format = dateFormat.ABSOLUTETIME_FORMAT;
} else if (format == "DATE") {
@@ -178,10 +175,6 @@ function patternLayout (pattern, tokens) {
// Format the date
return dateFormat.asString(format, loggingEvent.startTime);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return formatLogData(loggingEvent.data);
@@ -225,7 +218,6 @@ function patternLayout (pattern, tokens) {
var replacers = {
'c': categoryName,
'd': formatAsDate,
'h': hostname,
'm': formatMessage,
'n': endOfLine,
'p': logLevel,

lib/log4js.js

@@ -44,17 +44,20 @@
* Website: http://log4js.berlios.de
*/
var events = require('events')
, async = require('async')
, fs = require('fs')
, path = require('path')
, util = require('util')
, layouts = require('./layouts')
, levels = require('./levels')
, LoggingEvent = require('./logger').LoggingEvent
, Logger = require('./logger').Logger
, loggerModule = require('./logger')
, LoggingEvent = loggerModule.LoggingEvent
, Logger = loggerModule.Logger
, ALL_CATEGORIES = '[all]'
, appenders = {}
, loggers = {}
, appenderMakers = {}
, appenderShutdowns = {}
, defaultConfig = {
appenders: [
{ type: "console" }
@@ -298,9 +301,42 @@ function loadAppender(appender) {
appenderModule = require(appender);
}
module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
if (appenderModule.shutdown) {
appenderShutdowns[appender] = appenderModule.shutdown.bind(appenderModule);
}
appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
}
/**
* Shutdown all log appenders. This will first disable all writing to appenders
* and then call the shutdown function of each appender.
*
* @param {Function} cb - The callback to be invoked once all appenders have
* shutdown. If an error occurs, the callback will be given the error object
* as the first argument.
* @returns {void}
*/
function shutdown(cb) {
// First, disable all writing to appenders. This stops run-away log
// writes from preventing appenders from draining.
loggerModule.disableAllLogWrites();
// Next, get all the shutdown functions for appenders as an array.
var shutdownFunctions = Object.keys(appenderShutdowns).reduce(
function(accum, category) {
return accum.concat(appenderShutdowns[category]);
}, []);
// Call each of the shutdown functions.
async.forEach(
shutdownFunctions,
function(shutdownFn, done) {
shutdownFn(done);
},
cb
);
}
module.exports = {
getLogger: getLogger,
getDefaultLogger: getDefaultLogger,
@@ -309,6 +345,7 @@ module.exports = {
loadAppender: loadAppender,
clearAppenders: clearAppenders,
configure: configure,
shutdown: shutdown,
replaceConsole: replaceConsole,
restoreConsole: restoreConsole,
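For appender authors, the contract this change implies: export a shutdown(cb) next to appender and configure, and loadAppender will cache it for log4js.shutdown to call. A hypothetical appender module sketch (names are illustrative):

"use strict";
var buffered = [];

function bufferAppender(config) {
  return function(loggingEvent) {
    buffered.push(loggingEvent);
  };
}

function configure(config, options) {
  return bufferAppender(config);
}

// Invoked by log4js.shutdown(); call cb (with an error, if any) once
// everything this appender has buffered is flushed.
function shutdown(cb) {
  buffered = [];
  cb();
}

exports.appender = bufferAppender;
exports.configure = configure;
exports.shutdown = shutdown;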

lib/logger.js

@@ -4,6 +4,8 @@ var levels = require('./levels')
, events = require('events')
, DEFAULT_CATEGORY = '[default]';
var logWritesEnabled = true;
/**
* Models a logging event.
* @constructor
@@ -66,7 +68,7 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
};
Logger.prototype[levelString.toLowerCase()] = function () {
if (this.isLevelEnabled(level)) {
if (logWritesEnabled && this.isLevelEnabled(level)) {
var args = Array.prototype.slice.call(arguments);
args.unshift(level);
Logger.prototype.log.apply(this, args);
@@ -75,6 +77,23 @@ Logger.prototype.isLevelEnabled = function(otherLevel) {
}
);
/**
* Disable all log writes.
* @returns {void}
*/
function disableAllLogWrites() {
logWritesEnabled = false;
}
/**
* Enable log writes.
* @returns {void}
*/
function enableAllLogWrites() {
logWritesEnabled = true;
}
exports.LoggingEvent = LoggingEvent;
exports.Logger = Logger;
exports.disableAllLogWrites = disableAllLogWrites;
exports.enableAllLogWrites = enableAllLogWrites;
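The guard in the level methods means writes issued while disabled are dropped outright, not queued. A sketch of the observable behavior, assuming the exports above:

var loggerModule = require('./lib/logger');
var logger = new loggerModule.Logger();
logger.addListener('log', function(event) {
  console.log(event.data[0]);
});
logger.debug('logged');              // emitted
loggerModule.disableAllLogWrites();
logger.debug('dropped');             // discarded, never emitted
loggerModule.enableAllLogWrites();
logger.debug('logged again');        // emitted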

package.json

@@ -1,6 +1,6 @@
{
"name": "log4js",
"version": "0.6.9",
"version": "0.6.7",
"description": "Port of Log4js to work with node.",
"keywords": [
"logging",
@@ -29,6 +29,7 @@
},
"dependencies": {
"async": "0.1.15",
"dequeue": "1.0.3",
"semver": "~1.1.4",
"readable-stream": "~1.0.2"
},
@@ -37,8 +38,5 @@
"sandboxed-module": "0.1.3",
"hook.io": "0.8.10",
"underscore": "1.2.1"
},
"browser": {
"os": false
}
}

test/categoryFilter-test.js (deleted)

@@ -1,83 +0,0 @@
'use strict';
var vows = require('vows')
, fs = require('fs')
, assert = require('assert');
function remove(filename) {
try {
fs.unlinkSync(filename);
} catch (e) {
//doesn't really matter if it failed
}
}
vows.describe('log4js categoryFilter').addBatch({
'appender': {
topic: function() {
var log4js = require('../lib/log4js'), logEvents = [], webLogger, appLogger;
log4js.clearAppenders();
var appender = require('../lib/appenders/categoryFilter')
.appender(
['app'],
function(evt) { logEvents.push(evt); }
);
log4js.addAppender(appender, ["app","web"]);
webLogger = log4js.getLogger("web");
appLogger = log4js.getLogger("app");
webLogger.debug('This should get logged');
appLogger.debug('This should not');
webLogger.debug('Hello again');
log4js.getLogger('db').debug('This shouldn\'t be included by the appender anyway');
return logEvents;
},
'should only pass matching category' : function(logEvents) {
assert.equal(logEvents.length, 2);
assert.equal(logEvents[0].data[0], 'This should get logged');
assert.equal(logEvents[1].data[0], 'Hello again');
}
},
'configure': {
topic: function() {
var log4js = require('../lib/log4js')
, logger, weblogger;
remove(__dirname + '/categoryFilter-web.log');
remove(__dirname + '/categoryFilter-noweb.log');
log4js.configure('test/with-categoryFilter.json');
logger = log4js.getLogger("app");
weblogger = log4js.getLogger("web");
logger.info('Loading app');
logger.debug('Initialising indexes');
weblogger.info('00:00:00 GET / 200');
weblogger.warn('00:00:00 GET / 500');
//wait for the file system to catch up
setTimeout(this.callback, 100);
},
'tmp-tests.log': {
topic: function() {
fs.readFile(__dirname + '/categoryFilter-noweb.log', 'utf8', this.callback);
},
'should contain all log messages': function(contents) {
var messages = contents.trim().split('\n');
assert.deepEqual(messages, ['Loading app','Initialising indexes']);
}
},
'tmp-tests-web.log': {
topic: function() {
fs.readFile(__dirname + '/categoryFilter-web.log','utf8',this.callback);
},
'should contain only error and warning log messages': function(contents) {
var messages = contents.trim().split('\n');
assert.deepEqual(messages, ['00:00:00 GET / 200','00:00:00 GET / 500']);
}
}
}
}).export(module);

test/clusteredAppender-test.js (deleted)

@@ -1,125 +0,0 @@
"use strict";
var assert = require('assert');
var vows = require('vows');
var layouts = require('../lib/layouts');
var sandbox = require('sandboxed-module');
var LoggingEvent = require('../lib/logger').LoggingEvent;
var cluster = require('cluster');
vows.describe('log4js cluster appender').addBatch({
'when in master mode': {
topic: function() {
var registeredClusterEvents = [];
var loggingEvents = [];
// Fake cluster module, so no cluster listeners be really added
var fakeCluster = {
on: function(event, callback) {
registeredClusterEvents.push(event);
},
isMaster: true,
isWorker: false,
};
var fakeActualAppender = function(loggingEvent) {
loggingEvents.push(loggingEvent);
}
// Load appender and fake modules in it
var appenderModule = sandbox.require('../lib/appenders/clustered', {
requires: {
'cluster': fakeCluster,
}
});
var masterAppender = appenderModule.appender({
actualAppenders: [ fakeActualAppender ]
});
// Actual test - log message using masterAppender
masterAppender(new LoggingEvent('wovs', 'Info', ['masterAppender test']));
var returnValue = {
registeredClusterEvents: registeredClusterEvents,
loggingEvents: loggingEvents,
};
return returnValue;
},
"should register 'fork' event listener on 'cluster'": function(topic) {
assert.equal(topic.registeredClusterEvents[0], 'fork');
},
"should log using actual appender": function(topic) {
assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test');
},
},
'when in worker mode': {
topic: function() {
var registeredProcessEvents = [];
// Fake cluster module, to fake we're inside a worker process
var fakeCluster = {
isMaster: false,
isWorker: true,
};
var fakeProcess = {
send: function(data) {
registeredProcessEvents.push(data);
},
};
// Load appender and fake modules in it
var appenderModule = sandbox.require('../lib/appenders/clustered', {
requires: {
'cluster': fakeCluster,
},
globals: {
'process': fakeProcess,
}
});
var workerAppender = appenderModule.appender();
// Actual test - log message using masterAppender
workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));
var returnValue = {
registeredProcessEvents: registeredProcessEvents,
};
return returnValue;
},
"worker appender should call process.send" : function(topic) {
assert.equal(topic.registeredProcessEvents[0].type, '::log-message');
assert.equal(JSON.parse(topic.registeredProcessEvents[0].event).data[0], "workerAppender test");
},
"worker should serialize an Error correctly" : function(topic) {
assert.equal(topic.registeredProcessEvents[1].type, '::log-message');
assert(JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack);
var actual = JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack;
var expectedRegex = /^Error: Error test/;
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
}
}
}).exportTo(module);

test for lib/appenders/dateFile

@@ -4,7 +4,8 @@ var vows = require('vows')
, path = require('path')
, fs = require('fs')
, sandbox = require('sandboxed-module')
, log4js = require('../lib/log4js');
, log4js = require('../lib/log4js')
, EOL = require('os').EOL || '\n';
function removeFile(filename) {
return function() {
@@ -134,7 +135,10 @@ vows.describe('../lib/appenders/dateFile').addBatch({
teardown: removeFile('date-file-test.log'),
'should load appender configuration from a json file': function(err, contents) {
assert.include(contents, 'this should be written to the file' + require('os').EOL);
if (err) {
throw err;
}
assert.include(contents, 'this should be written to the file' + EOL);
assert.equal(contents.indexOf('this should not be written to the file'), -1);
}
},
@@ -161,7 +165,7 @@ vows.describe('../lib/appenders/dateFile').addBatch({
, thisTime = format.asString(options.appenders[0].pattern, new Date());
fs.writeFileSync(
path.join(__dirname, 'date-file-test' + thisTime),
"this is existing data" + require('os').EOL,
"this is existing data" + EOL,
'utf8'
);
log4js.clearAppenders();

test/date_format-test.js

@@ -39,13 +39,6 @@ vows.describe('date_format').addBatch({
dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date),
'14:31:30.005'
);
},
'should provide a custom format': function(date) {
date.getTimezoneOffset = function() { return 120; };
assert.equal(
dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date),
'-0200.005.30.31.14.11.01.10'
);
}
}
}).export(module);

test/fileAppender-test.js

@@ -222,7 +222,7 @@ vows.describe('log4js fileAppender').addBatch({
, logger;
//this config file defines one file appender (to ./tmp-tests.log)
//and sets the log level for "tests" to WARN
log4js.configure('./test/log4js.json');
log4js.configure('test/log4js.json');
logger = log4js.getLogger('tests');
logger.info('this should not be written to the file');
logger.warn('this should be written to the file');

test/layouts-test.js

@@ -209,9 +209,6 @@ vows.describe('log4js layouts').addBatch({
'%n should output a new line': function(args) {
test(args, '%n', '\n');
},
'%h should output hostname' : function(args) {
test(args, '%h', require('os').hostname().toString());
},
'%c should handle category names like java-style package names': function(args) {
test(args, '%c{1}', 'tests');
test(args, '%c{2}', 'of.tests');
@@ -224,11 +221,9 @@ vows.describe('log4js layouts').addBatch({
test(args, '%d', '2010-12-05 14:18:30.045');
},
'%d should allow for format specification': function(args) {
test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30-0000');
test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045');
test(args, '%d{ABSOLUTE}', '14:18:30.045');
test(args, '%d{DATE}', '05 12 2010 14:18:30.045');
test(args, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30');
test(args, '%d{yyyy MM dd}', '2010 12 05');
test(args, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045');
},

test/logger-test.js

@@ -2,7 +2,8 @@
var vows = require('vows')
, assert = require('assert')
, levels = require('../lib/levels')
, Logger = require('../lib/logger').Logger;
, loggerModule = require('../lib/logger')
, Logger = loggerModule.Logger;
vows.describe('../lib/logger').addBatch({
'constructor with no parameters': {
@@ -53,5 +54,28 @@ vows.describe('../lib/logger').addBatch({
assert.isTrue(logger.isErrorEnabled());
assert.isTrue(logger.isFatalEnabled());
}
},
'should emit log events': {
topic: function() {
var events = [],
logger = new Logger();
logger.addListener('log', function (logEvent) { events.push(logEvent); });
logger.debug('Event 1');
loggerModule.disableAllLogWrites();
logger.debug('Event 2');
loggerModule.enableAllLogWrites();
logger.debug('Event 3');
return events;
},
'when log writes are enabled': function(events) {
assert.equal(events[0].data[0], 'Event 1');
},
'but not when log writes are disabled': function(events) {
assert.equal(events.length, 2);
assert.equal(events[1].data[0], 'Event 3');
}
}
}).exportTo(module);

test for lib/log4js

@@ -75,13 +75,65 @@ vows.describe('log4js').addBatch({
assert.equal(events[1].level.toString(), 'WARN');
},
'should include the error if passed in': function (events) {
'should include the error if passed in': function(events) {
assert.instanceOf(events[2].data[1], Error);
assert.equal(events[2].data[1].message, 'Pants are on fire!');
}
}
},
'when shutdown is called': {
topic: function() {
var events = {
appenderShutdownCalled: false,
shutdownCallbackCalled: false
},
log4js = sandbox.require(
'../lib/log4js',
{
requires: {
'./appenders/file':
{
name: "file",
appender: function() {},
configure: function(configuration) {
return function() {};
},
shutdown: function(cb) {
events.appenderShutdownCalled = true;
cb();
}
}
}
}
),
shutdownCallback = function() {
events.shutdownCallbackCalled = true;
},
config = { appenders:
[ { "type" : "file",
"filename" : "cheesy-wotsits.log",
"maxLogSize" : 1024,
"backups" : 3
}
]
};
log4js.configure(config);
log4js.shutdown(shutdownCallback);
// Re-enable log writing so other tests that use logger are not
// affected.
require('../lib/logger').enableAllLogWrites();
return events;
},
'should invoke appender shutdowns': function(events) {
assert.ok(events.appenderShutdownCalled);
},
'should call callback': function(events) {
assert.ok(events.shutdownCallbackCalled);
}
},
'invalid configuration': {

test/multiprocess-test.js

@@ -75,7 +75,6 @@ vows.describe('Multiprocess Appender').addBatch({
appender('after error, before connect');
fakeNet.cbs.connect();
appender('after error, after connect');
appender(new Error('Error test'));
return fakeNet;
},
@@ -99,13 +98,6 @@ vows.describe('Multiprocess Appender').addBatch({
assert.equal(net.data[6], JSON.stringify('after error, after connect'));
assert.equal(net.data[7], '__LOG4JS__');
assert.equal(net.createConnectionCalled, 2);
},
'should serialize an Error correctly': function(net) {
assert(JSON.parse(net.data[8]).stack, "Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property");
var actual = JSON.parse(net.data[8]).stack;
var expectedRegex = /^Error: Error test/;
assert(actual.match(expectedRegex), "Expected: \n\n " + actual + "\n\n to match " + expectedRegex);
}
},
'worker with timeout': {

test/with-categoryFilter.json (deleted)

@@ -1,23 +0,0 @@
{
"appenders": [
{
"type": "categoryFilter",
"exclude": "web",
"appender": {
"type": "file",
"filename": "test/categoryFilter-noweb.log",
"layout": {
"type": "messagePassThrough"
}
}
},
{
"category": "web",
"type": "file",
"filename": "test/categoryFilter-web.log",
"layout": {
"type": "messagePassThrough"
}
}
]
}